def test_create_public_project_as_user_clone_as_user(self):
     """ Create public project as user then clone as user
     """
     pname = 'p_%s' % create_random_str()
     # create the project as user2
     self.create_project(pname, config.USER_2)
     # add user2's ssh pubkey to user2's account
     gu = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
     gu.add_pubkey(config.USER_2_PUB_KEY)
     # prepare to clone
     priv_key_path = set_private_key(config.USER_2_PRIV_KEY)
     self.dirs_to_delete.append(os.path.dirname(priv_key_path))
     ggu = GerritGitUtils(config.USER_2,
                          priv_key_path,
                          config.USERS[config.USER_2]['email'])
     url = "ssh://%s@%s:29418/%s" % (config.USER_2,
                                     config.GATEWAY_HOST, pname)
     # clone
     clone_dir = ggu.clone(url, pname)
     self.dirs_to_delete.append(os.path.dirname(clone_dir))
     # Test that the clone is a success
     self.assertTrue(os.path.isdir(clone_dir))
     # Verify master owns the .gitreview file
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 '.gitreview')))
 def test_init_user_tests(self):
     """ Check if the test init feature behaves as expected
     """
     project = "p_%s" % create_random_str()
     self.create_project(project, config.USER_4)
     self.msu.create_init_tests(project, config.USER_4)
     ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]["email"])
     open_reviews = ggu.list_open_reviews("config", config.GATEWAY_HOST)
     match = [
         True
         for review in open_reviews
         if review["commitMessage"].startswith(
             "%s proposes initial test " "definition for project %s" % (config.USER_4, project)
         )
     ]
     self.assertEqual(len(match), 1)
     open_reviews = ggu.list_open_reviews(project, config.GATEWAY_HOST)
     match = [
         True
         for review in open_reviews
         if review["commitMessage"].startswith(
             "%s proposes initial test " "scripts for project %s" % (config.USER_4, project)
         )
     ]
     self.assertEqual(len(match), 1)
 def test_create_private_project_as_admin_clone_as_admin(self):
     """ Clone private project as admin and check content
     """
     pname = 'p_%s' % create_random_str()
     options = {"private": ""}
     self.create_project(pname, config.ADMIN_USER, options=options)
     ggu = GerritGitUtils(config.ADMIN_USER,
                          config.ADMIN_PRIV_KEY_PATH,
                          config.USERS[config.ADMIN_USER]['email'])
     url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                     config.GATEWAY_HOST, pname)
     clone_dir = ggu.clone(url, pname)
     self.dirs_to_delete.append(os.path.dirname(clone_dir))
     # Test that the clone is a success
     self.assertTrue(os.path.isdir(clone_dir))
     # Verify master owns the .gitreview file
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 '.gitreview')))
     # Verify meta/config branch owns both the groups and ACLs config files
     ggu.fetch_meta_config(clone_dir)
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 'project.config')))
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 'groups')))
     # There is a dev group for a private project
     content = file(os.path.join(clone_dir, 'project.config')).read()
     self.assertTrue('%s-dev' % pname in content)
     content = file(os.path.join(clone_dir, 'groups')).read()
     self.assertTrue('%s-dev' % pname in content)
 def test_create_public_project_as_user_clone_as_user(self):
     """ Create public project as user then clone as user
     """
     pname = 'p_%s' % create_random_str()
     # create the project as user2
     self.create_project(pname, config.USER_2)
     # add user2's ssh pubkey to user2's account
     gu = GerritUtils(
         'https://%s/' % config.GATEWAY_HOST,
         auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
     gu.add_pubkey(config.USER_2_PUB_KEY)
     # prepare to clone
     priv_key_path = set_private_key(config.USER_2_PRIV_KEY)
     self.dirs_to_delete.append(os.path.dirname(priv_key_path))
     ggu = GerritGitUtils(config.USER_2,
                          priv_key_path,
                          config.USERS[config.USER_2]['email'])
     url = "ssh://%s@%s:29418/%s" % (config.USER_2,
                                     config.GATEWAY_HOST, pname)
     # clone
     clone_dir = ggu.clone(url, pname)
     self.dirs_to_delete.append(os.path.dirname(clone_dir))
     # Test that the clone is a success
     self.assertTrue(os.path.isdir(clone_dir))
     # Verify master owns the .gitreview file
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 '.gitreview')))
Example 5
    def test_review_labels(self):
        """ Test if the list of review labels is as expected
        """
        pname = 'p_%s' % create_random_str()
        self.create_project(pname)
        un = config.ADMIN_USER
        gu = GerritUtils(config.GATEWAY_URL,
                         auth_cookie=config.USERS[un]['auth_cookie'])
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un, priv_key_path, config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST, pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]

        labels = gu.get_labels_list_for_change(change_id)

        self.assertIn('Workflow', labels)
        self.assertIn('Code-Review', labels)
        self.assertIn('Verified', labels)
        self.assertEqual(len(labels.keys()), 3)

        gu.del_pubkey(k_index)
Example 6
 def setUp(self):
     super(TestProjectTestsWorkflow, self).setUp()
     self.projects = []
     self.dirs_to_delete = []
     self.un = config.ADMIN_USER
     self.gu = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[self.un]['auth_cookie'])
     self.gu2 = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
     self.ju = JenkinsUtils()
     self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
     priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
     self.gitu_admin = GerritGitUtils(self.un,
                                      priv_key_path,
                                      config.USERS[self.un]['email'])
     # Clone the config repo and keep job/zuul config content
     self.config_clone_dir = self.clone_as_admin("config")
     self.original_zuul_projects = file(os.path.join(
         self.config_clone_dir, "zuul/projects.yaml")).read()
     self.original_project = file(os.path.join(
         self.config_clone_dir, "jobs/projects.yaml")).read()
     self.need_restore_config_repo = False
     # Put USER_2 as core for config project
     self.gu.add_group_member(config.USER_2, "config-core")
 def setUp(self):
     self.projects = []
     self.dirs_to_delete = []
     self.un = config.ADMIN_USER
     self.gu = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[self.un]['auth_cookie'])
     self.gu2 = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
     self.ju = JenkinsUtils()
     self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
     priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
     self.gitu_admin = GerritGitUtils(self.un,
                                      priv_key_path,
                                      config.USERS[self.un]['email'])
     # Clone the config repo and make changes to it
     # in order to test the new sample_project
     self.config_clone_dir = self.clone_as_admin("config")
     self.original_layout = file(os.path.join(
         self.config_clone_dir, "zuul/layout.yaml")).read()
     self.original_zuul_projects = file(os.path.join(
         self.config_clone_dir, "zuul/projects.yaml")).read()
     self.original_project = file(os.path.join(
         self.config_clone_dir, "jobs/projects.yaml")).read()
     # Put USER_2 as core for config project
     self.gu.add_group_member(config.USER_2, "config-core")
    def _prepare_review_submit_testing(self, project_options=None):
        if project_options is None:
            u2mail = config.USERS[config.USER_2]['email']
            project_options = {'core-group': u2mail}
        pname = 'p_%s' % create_random_str()
        self.create_project(pname, project_options)
        un = config.ADMIN_USER
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[un]['auth_cookie'])
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un,
                              priv_key_path,
                              config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]

        return change_id, gu, k_index
    def test_review_labels(self):
        """ Test if the list of review labels is as expected
        """
        pname = 'p_%s' % create_random_str()
        self.create_project(pname)
        un = config.ADMIN_USER
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[un]['auth_cookie'])
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un,
                              priv_key_path,
                              config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]

        labels = gu.get_labels_list_for_change(change_id)

        self.assertIn('Workflow', labels)
        self.assertIn('Code-Review', labels)
        self.assertIn('Verified', labels)
        self.assertEqual(len(labels.keys()), 3)

        gu.del_pubkey(k_index)
Example 10
    def test_basic_ops_project_namespace(self):
        """ Check if a project named with a / (namespace) is handled
        correctly on basic ops by managesf
        """
        pname = 'skydive/%s' % create_random_str()
        self.create_project(pname, config.USER_2)
        self.assertTrue(self.gu.project_exists(pname))
        self.assertTrue(self.gu.group_exists('%s-ptl' % pname))
        if is_present("redmine"):
            rname = '_'.join(pname.split('/'))
            self.assertTrue(self.rm.project_exists(rname))
        # Try to clone
        ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH,
                             config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST,
                                        pname)
        clone_dir = ggu.clone(url, pname.split('/')[-1])
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        # Test that the clone is a success
        self.assertTrue(os.path.isdir(clone_dir))
        # Verify master owns the .gitreview file
        self.assertTrue(os.path.isfile(os.path.join(clone_dir, '.gitreview')))
        # Delete the project from SF
        self.msu.deleteProject(pname, config.ADMIN_USER)
        self.assertFalse(self.gu.project_exists(pname))
        self.assertFalse(self.gu.group_exists('%s-ptl' % pname))
        if has_issue_tracker():
            rname = '_'.join(pname.split('/'))
            self.assertFalse(self.rm.project_exists(rname))
        self.assertFalse(self.gu.group_exists('%s-core' % pname))

        # Clean local clone directory
        self.projects.remove(pname)
Example 11
 def setUp(self):
     super(TestLogExportedInElasticSearch, self).setUp()
     self.un = config.ADMIN_USER
     self.priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
     self.gitu_admin = GerritGitUtils(self.un,
                                      self.priv_key_path,
                                      config.USERS[self.un]['email'])
    def test_basic_ops_project_namespace(self):
        """ Check if a project named with a / (namespace) is handled
        correctly on basic ops by managesf
        """
        pname = 'skydive/%s' % create_random_str()
        self.create_project(pname, config.USER_2)
        self.assertTrue(self.gu.project_exists(pname))
        self.assertTrue(self.gu.group_exists('%s-ptl' % pname))
        if is_present("SFRedmine"):
            rname = '_'.join(pname.split('/'))
            self.assertTrue(self.rm.project_exists(rname))
        # Try to clone
        ggu = GerritGitUtils(config.ADMIN_USER,
                             config.ADMIN_PRIV_KEY_PATH,
                             config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        clone_dir = ggu.clone(url, pname.split('/')[-1])
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        # Test that the clone is a success
        self.assertTrue(os.path.isdir(clone_dir))
        # Verify master owns the .gitreview file
        self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                    '.gitreview')))
        # Delete the project from SF
        self.msu.deleteProject(pname, config.ADMIN_USER)
        self.assertFalse(self.gu.project_exists(pname))
        self.assertFalse(self.gu.group_exists('%s-ptl' % pname))
        if is_present("SFRedmine"):
            rname = '_'.join(pname.split('/'))
            self.assertFalse(self.rm.project_exists(rname))
        self.assertFalse(self.gu.group_exists('%s-core' % pname))

        # Clean local clone directory
        self.projects.remove(pname)
 def test_create_private_project_as_admin_clone_as_admin(self):
     """ Clone private project as admin and check content
     """
     pname = 'p_%s' % create_random_str()
     options = {"private": ""}
     self.create_project(pname, config.ADMIN_USER, options=options)
     ggu = GerritGitUtils(config.ADMIN_USER,
                          config.ADMIN_PRIV_KEY_PATH,
                          config.USERS[config.ADMIN_USER]['email'])
     url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                     config.GATEWAY_HOST, pname)
     clone_dir = ggu.clone(url, pname)
     self.dirs_to_delete.append(os.path.dirname(clone_dir))
     # Test that the clone is a success
     self.assertTrue(os.path.isdir(clone_dir))
     # Verify master owns the .gitreview file
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 '.gitreview')))
     # Verify meta/config branch owns both the groups and ACLs config files
     ggu.fetch_meta_config(clone_dir)
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 'project.config')))
     self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                 'groups')))
     # There is a dev group for a private project
     content = file(os.path.join(clone_dir, 'project.config')).read()
     self.assertTrue('%s-dev' % pname in content)
     content = file(os.path.join(clone_dir, 'groups')).read()
     self.assertTrue('%s-dev' % pname in content)
Example 14
 def __init__(self):
     with open("%s/resources.yaml" % pwd, 'r') as rsc:
         self.resources = yaml.load(rsc)
     config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
         config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
     self.msu = ManageSfUtils(config.GATEWAY_URL)
     self.ggu = GerritGitUtils(config.ADMIN_USER,
                               config.ADMIN_PRIV_KEY_PATH,
                               config.USERS[config.ADMIN_USER]['email'])
     self.ju = JenkinsUtils()
     self.rm = RedmineUtils(
         config.GATEWAY_URL + "/redmine/",
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
Example 15
    def setUp(self):
        super(TestRepoxplorer, self).setUp()
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        self.gitu_admin = GerritGitUtils(
            config.ADMIN_USER, priv_key_path,
            config.USERS[config.ADMIN_USER]['email'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ju = JenkinsUtils()

        self.dirs_to_delete = []
Example 16
 def __init__(self):
     with open("%s/resources.yaml" % pwd, 'r') as rsc:
         self.resources = yaml.load(rsc)
     config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
         config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
     self.msu = ManageSfUtils(config.GATEWAY_URL)
     self.ru = ResourcesUtils()
     self.ggu = GerritGitUtils(config.ADMIN_USER,
                               config.ADMIN_PRIV_KEY_PATH,
                               config.USERS[config.ADMIN_USER]['email'])
     self.stb_client = SFStoryboard(
         config.GATEWAY_URL + "/storyboard_api",
         config.USERS[config.ADMIN_USER]['auth_cookie'])
Example 17
 def test_01_validate_gerrit_project_acls(self):
     """ Verify the correct behavior of ACLs set on
     gerrit project
     """
     pname = "TestProjectACL"
     self.createProject(pname)
     un = config.ADMIN_USER
     priv_key_path = set_private_key(config.USERS[un]["privkey"])
     gitu = GerritGitUtils(un,
                           priv_key_path,
                           config.USERS[un]['email'])
     url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST,
                                     pname)
     clone_dir = gitu.clone(url, pname)
     gitu.fetch_meta_config(clone_dir)
     with open(os.path.join(clone_dir,
                            'project.config')) as project_config:
         p_config = parse_project_config(project_config)
     ptl = pname + "-ptl"
     core = pname + "-core"
     self.assertTrue('access "refs/*"' in p_config.keys(),
                     repr(p_config))
     self.assertTrue('access "refs/heads/*"' in p_config.keys(),
                     repr(p_config))
     self.assertTrue('access "refs/meta/config"' in p_config.keys(),
                     repr(p_config))
     self.assertTrue(any(ptl in l
                         for l in p_config['access "refs/*"']['owner']),
                     repr(p_config))
     self.assertTrue(any(core in l
                         for l in p_config['access "refs/*"']['read']),
                     repr(p_config))
     heads = p_config['access "refs/heads/*"']
     self.assertTrue(any(core in l
                         for l in heads['label-Code-Review']),
                     repr(p_config))
     self.assertTrue(any(core in l
                         for l in heads['label-Workflow']),
                     repr(p_config))
     self.assertTrue(any(ptl in l
                         for l in heads['label-Verified']),
                     repr(p_config))
     self.assertTrue(any(ptl in l
                         for l in heads['submit']),
                     repr(p_config))
     self.assertTrue(any(core in l
                         for l in heads['read']),
                     repr(p_config))
     # no need to test refs/meta/config, we could not test it if we
     # could not access it to begin with
     self.dirs_to_delete.append(os.path.dirname(clone_dir))
Example 18
 def setUp(self):
     super(TestGerritHooks, self).setUp()
     self.projects = []
     self.dirs_to_delete = []
     self.issues = []
     self.u = config.ADMIN_USER
     self.rm = get_issue_tracker_utils(
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
     self.gu = GerritUtils(config.GATEWAY_URL,
                           auth_cookie=config.USERS[self.u]['auth_cookie'])
     self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
     priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
     self.gitu = GerritGitUtils(self.u, priv_key_path,
                                config.USERS[self.u]['email'])
    def test_check_config_repo_exists(self):
        """ Validate the config repo has been bootstrapped
        """
        pname = "config"
        gu = GerritUtils(config.GATEWAY_URL, auth_cookie=config.USERS[config.ADMIN_USER]["auth_cookie"])
        self.assertTrue(gu.project_exists(pname))

        ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]["email"])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, pname)
        clone_dir = ggu.clone(url, pname)
        # Test that the clone is a success
        self.assertTrue(os.path.isdir(clone_dir))
        # Check if the clone dir has projects file
        self.assertTrue(os.path.isfile(os.path.join(clone_dir, "jobs/projects.yaml")))
Example 20
    def test_upstream(self):
        """ Validate upstream feature of managesf
        """
        # Create a test upstream project
        pname_us = 'p_upstream'
        self.create_project(pname_us, config.ADMIN_USER)

        ggu_us = GerritGitUtils(config.ADMIN_USER,
                                config.ADMIN_PRIV_KEY_PATH,
                                config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname_us)
        # clone
        us_clone_dir = ggu_us.clone(url, pname_us)
        self.dirs_to_delete.append(os.path.dirname(us_clone_dir))
        # Test that the clone is a success
        self.assertTrue(os.path.isdir(us_clone_dir))
        # push some test files to the upstream project
        us_files = [str(x) for x in range(1, 10)]
        for f in us_files:
            file(os.path.join(us_clone_dir, f), 'w').write(f)
            os.chmod(os.path.join(us_clone_dir, f), 0755)

        ggu_us.add_commit_in_branch(us_clone_dir, "master",
                                    commit="Adding files 1-10",
                                    files=us_files)
        ggu_us.direct_push_branch(us_clone_dir, "master")

        # Now create a test project with upstream pointing to the above
        upstream_url = "ssh://%s@%s:29418/%s" % (
            config.ADMIN_USER, config.GATEWAY_HOST, pname_us)
        pname = 'p_%s' % create_random_str()
        # create the project as admin
        options = {"upstream": upstream_url,
                   "upstream-ssh-key": config.ADMIN_PRIV_KEY_PATH}
        self.create_project(pname, config.ADMIN_USER, options=options)

        ggu = GerritGitUtils(config.ADMIN_USER,
                             config.ADMIN_PRIV_KEY_PATH,
                             config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        # clone
        clone_dir = ggu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        # Check that the files pushed to the upstream project are present
        files = [f for f in os.listdir(clone_dir) if not f.startswith('.')]
        self.assertEqual(set(files), set(us_files))
Example 21
 def setUp(self):
     self.msu = ManageSfUtils(config.GATEWAY_URL)
     self.un = config.ADMIN_USER
     self.gu = GerritUtils(config.GATEWAY_URL,
                           auth_cookie=config.USERS[self.un]['auth_cookie'])
     self.gu2 = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
     self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
     priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
     self.gitu_admin = GerritGitUtils(self.un, priv_key_path,
                                      config.USERS[self.un]['email'])
     # Configuration to access mirror repo present in managesf
     self.managesf_repo_path = "ssh://%s@%s/home/gerrit/git/" % (
         config.GERRIT_USER, config.GATEWAY_HOST)
     # prepare environment for git clone on mirror repo
     self.mt = Tool()
     self.mt_tempdir = tempfile.mkdtemp()
     priv_key = file(config.GERRIT_SERVICE_PRIV_KEY_PATH, 'r').read()
     priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
     file(priv_key_path, 'w').write(priv_key)
     os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
     ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
         priv_key_path)
     wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
     file(wrapper_path, 'w').write(ssh_wrapper)
     os.chmod(wrapper_path, stat.S_IRWXU)
     self.mt.env['GIT_SSH'] = wrapper_path
     self.pname = 'test-replication'
Example 22
    def test_check_add_automatic_reviewers(self):
        """ Test if reviewers-by-blame plugin works
        """
        data = "this\nis\na\ncouple\nof\nlines"
        change_id, gu, k1_index, pname = self._prepare_review_submit_testing(
            ('file', data))

        # Merge the change
        gu.submit_change_note(change_id, "current", "Code-Review", "2")
        gu.submit_change_note(change_id, "current", "Verified", "2")
        gu.submit_change_note(change_id, "current", "Workflow", "1")
        self.assertTrue(gu.submit_patch(change_id, "current"))

        gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        # Change the file we have committed as the admin user
        k2_index = gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[config.USER_2]["privkey"])
        gitu2 = GerritGitUtils(config.USER_2, priv_key_path,
                               config.USERS[config.USER_2]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.USER_2, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu2.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        data = ['this', 'is', 'some', 'lines']
        file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data))
        gitu2.add_commit_and_publish(clone_dir,
                                     "master",
                                     "Test commit",
                                     fnames=["file"])
        # Get the change id
        change_ids = gu2.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]
        # Verify the admin user has been automatically added to reviewers
        for retry in xrange(3):
            if len(gu2.get_reviewers(change_id)) > 0:
                break
            time.sleep(1)
        reviewers = gu2.get_reviewers(change_id)
        self.assertEqual(len(reviewers), 1)
        self.assertEqual(reviewers[0], config.ADMIN_USER)

        gu.del_pubkey(k1_index)
        gu2.del_pubkey(k2_index)
    def test_check_download_commands(self):
        """ Test if download commands plugin works
        """
        pname = 'p_%s' % create_random_str()
        self.create_project(pname)
        un = config.ADMIN_USER
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[un]['auth_cookie'])
        self.assertTrue(gu.project_exists(pname))
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un,
                              priv_key_path,
                              config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]
        resp = gu.get_change_last_patchset(change_id)
        self.assertIn("current_revision", resp)
        self.assertIn("revisions", resp)
        current_rev = resp["current_revision"]
        fetch = resp["revisions"][current_rev]["fetch"]
        self.assertGreater(len(fetch.keys()), 0)

        # disable the plugin and check that the fetch info is empty
        gu.e_d_plugin("download-commands", 'disable')
        resp = gu.get_change_last_patchset(change_id)
        fetch = resp["revisions"][current_rev]["fetch"]
        self.assertEqual(len(fetch.keys()), 0)

        # enable the plugin and check if the fetch information is valid
        gu.e_d_plugin("download-commands", 'enable')
        resp = gu.get_change_last_patchset(change_id)
        fetch = resp["revisions"][current_rev]["fetch"]
        self.assertGreater(len(fetch.keys()), 0)

        gu.del_pubkey(k_index)
Example 24
    def _prepare_review_submit_testing(self, data=None):
        pname = 'p_%s' % create_random_str()
        self.create_project(pname)
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        k_index = gu.add_pubkey(config.USERS[config.ADMIN_USER]["pubkey"])
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        gitu = GerritGitUtils(config.ADMIN_USER, priv_key_path,
                              config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        if not data:
            gitu.add_commit_and_publish(clone_dir, "master", "Test commit")
        else:
            file(os.path.join(clone_dir, "file"), 'w').write(data[1])
            gitu.add_commit_and_publish(clone_dir,
                                        "master",
                                        "Test commit",
                                        fnames=[data[0]])

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]

        return change_id, gu, k_index, pname
 def setUp(self):
     self.projects = []
     self.dirs_to_delete = []
     self.issues = []
     self.u = config.ADMIN_USER
     self.u2 = config.USER_2
     self.rm = RedmineUtils(
         config.GATEWAY_URL + "/redmine/",
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
     self.gu = GerritUtils(config.GATEWAY_URL,
                           auth_cookie=config.USERS[self.u]['auth_cookie'])
     self.gu2 = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[self.u2]['auth_cookie'])
     self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
     priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
     self.gitu = GerritGitUtils(self.u, priv_key_path,
                                config.USERS[self.u]['email'])
    def test_check_config_repo_exists(self):
        pname = 'config'
        gu = GerritUtils(
            'https://%s/' % config.GATEWAY_HOST,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.assertTrue(gu.project_exists(pname))

        ggu = GerritGitUtils(config.ADMIN_USER,
                             config.ADMIN_PRIV_KEY_PATH,
                             config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        clone_dir = ggu.clone(url, pname)
        # Test that the clone is a success
        self.assertTrue(os.path.isdir(clone_dir))
        # Check if the clone dir has projects file
        self.assertTrue(os.path.isfile(os.path.join(clone_dir,
                                                    "jobs/projects.yaml")))
    def test_check_config_repo_exists(self):
        """ Validate the config repo has been bootstrapped
        """
        pname = 'config'
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.assertTrue(gu.project_exists(pname))

        ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH,
                             config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST,
                                        pname)
        clone_dir = ggu.clone(url, pname)
        # Test that the clone is a success
        self.assertTrue(os.path.isdir(clone_dir))
        # Check if the clone dir has projects file
        self.assertTrue(
            os.path.isfile(os.path.join(clone_dir, "jobs/projects.yaml")))
Example 28
    def test_check_download_commands(self):
        """ Test if download commands plugin works
        """
        pname = 'p_%s' % create_random_str()
        self.create_project(pname)
        un = config.ADMIN_USER
        gu = GerritUtils(config.GATEWAY_URL,
                         auth_cookie=config.USERS[un]['auth_cookie'])
        self.assertTrue(gu.project_exists(pname))
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un, priv_key_path, config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST, pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]
        resp = gu.get_change_last_patchset(change_id)
        self.assertIn("current_revision", resp)
        self.assertIn("revisions", resp)
        current_rev = resp["current_revision"]
        fetch = resp["revisions"][current_rev]["fetch"]
        self.assertGreater(len(fetch.keys()), 0)

        # disable the plugin and check that the fetch info is empty
        gu.e_d_plugin("download-commands", 'disable')
        resp = gu.get_change_last_patchset(change_id)
        fetch = resp["revisions"][current_rev]["fetch"]
        self.assertEqual(len(fetch.keys()), 0)

        # enable the plugin and check if the fetch information is valid
        gu.e_d_plugin("download-commands", 'enable')
        resp = gu.get_change_last_patchset(change_id)
        fetch = resp["revisions"][current_rev]["fetch"]
        self.assertGreater(len(fetch.keys()), 0)

        gu.del_pubkey(k_index)
Example 29
 def test_init_user_tests(self):
     """ Check if the test init feature behaves as expected
     """
     project = 'p_%s' % create_random_str()
     self.create_project(project, config.USER_4)
     self.msu.create_init_tests(project, config.USER_4)
     ggu = GerritGitUtils(config.ADMIN_USER,
                          config.ADMIN_PRIV_KEY_PATH,
                          config.USERS[config.ADMIN_USER]['email'])
     open_reviews = ggu.list_open_reviews('config', config.GATEWAY_HOST)
     match = [True for review in open_reviews if review['commitMessage'].
              startswith("%s proposes initial test "
                         "definition for project %s" %
                         (config.USER_4, project))]
     self.assertEqual(len(match), 1)
     open_reviews = ggu.list_open_reviews(project, config.GATEWAY_HOST)
     match = [True for review in open_reviews if review['commitMessage'].
              startswith("%s proposes initial test "
                         "scripts for project %s" %
                         (config.USER_4, project))]
     self.assertEqual(len(match), 1)
Example 30
    def setUp(self):
        super(TestProjectReplication, self).setUp()
        self.ru = ResourcesUtils()
        self.un = config.ADMIN_USER
        self.ju = JenkinsUtils()
        self.gu = GerritUtils(config.GATEWAY_URL,
                              auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un, priv_key_path,
                                         config.USERS[self.un]['email'])

        # Prepare environment for git clone on mirror repo
        self.mt = Tool()
        self.mt_tempdir = tempfile.mkdtemp()
        # Copy the service private key in a flat file
        priv_key = file(config.SERVICE_PRIV_KEY_PATH, 'r').read()
        priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
        file(priv_key_path, 'w').write(priv_key)
        os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
        # Prepare the ssh wrapper script
        ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
            priv_key_path)
        wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
        file(wrapper_path, 'w').write(ssh_wrapper)
        os.chmod(wrapper_path, stat.S_IRWXU)
        # Set the wrapper as GIT_SSH env variable
        self.mt.env['GIT_SSH'] = wrapper_path

        self.config_clone_dir = None

        # Project we are going to configure the replication for
        self.pname = 'test/replication'

        # Remove artifacts of previous run if any
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)
Example 31
 def test_create_public_project_as_admin_clone_as_admin(self):
     """ Clone public project as admin and check content
     """
     pname = "p_%s" % create_random_str()
     self.create_project(pname, config.ADMIN_USER)
     ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]["email"])
     url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, pname)
     clone_dir = ggu.clone(url, pname)
     self.dirs_to_delete.append(os.path.dirname(clone_dir))
     # Test that the clone is a success
     self.assertTrue(os.path.isdir(clone_dir))
     # Verify master owns the .gitreview file
     self.assertTrue(os.path.isfile(os.path.join(clone_dir, ".gitreview")))
     # Verify meta/config branch owns both the groups and ACLs config files
     ggu.fetch_meta_config(clone_dir)
     self.assertTrue(os.path.isfile(os.path.join(clone_dir, "project.config")))
     self.assertTrue(os.path.isfile(os.path.join(clone_dir, "groups")))
     # There is no dev group for a public project
     content = file(os.path.join(clone_dir, "project.config")).read()
     self.assertFalse("%s-dev" % pname in content)
     content = file(os.path.join(clone_dir, "groups")).read()
     self.assertFalse("%s-dev" % pname in content)
Example 32
 def test_01_validate_gerrit_project_acls(self):
     """ Verify the correct behavior of ACLs set on
     gerrit project
     """
     pname = "TestProjectACL"
     self.createProject(pname)
     un = config.ADMIN_USER
     priv_key_path = set_private_key(config.USERS[un]["privkey"])
     gitu = GerritGitUtils(un, priv_key_path, config.USERS[un]['email'])
     url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST, pname)
     clone_dir = gitu.clone(url, pname)
     gitu.fetch_meta_config(clone_dir)
     with open(os.path.join(clone_dir, 'project.config')) as project_config:
         p_config = parse_project_config(project_config)
     ptl = pname + "-ptl"
     core = pname + "-core"
     self.assertTrue('access "refs/*"' in p_config.keys(), repr(p_config))
     self.assertTrue('access "refs/heads/*"' in p_config.keys(),
                     repr(p_config))
     self.assertTrue('access "refs/meta/config"' in p_config.keys(),
                     repr(p_config))
     self.assertTrue(
         any(ptl in l for l in p_config['access "refs/*"']['owner']),
         repr(p_config))
     self.assertTrue(
         any(core in l for l in p_config['access "refs/*"']['read']),
         repr(p_config))
     heads = p_config['access "refs/heads/*"']
     self.assertTrue(any(core in l for l in heads['label-Code-Review']),
                     repr(p_config))
     self.assertTrue(any(core in l for l in heads['label-Workflow']),
                     repr(p_config))
     self.assertTrue(any(ptl in l for l in heads['label-Verified']),
                     repr(p_config))
     self.assertTrue(any(ptl in l for l in heads['submit']), repr(p_config))
     self.assertTrue(any(core in l for l in heads['read']), repr(p_config))
     # no need to test refs/meta/config, we could not test it if we
     # could not access it to begin with
     self.dirs_to_delete.append(os.path.dirname(clone_dir))
Example 33
 def __init__(self):
     with open('resources.yaml', 'r') as rsc:
         self.resources = yaml.load(rsc)
     config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
         config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
     self.msu = ManageSfUtils(config.GATEWAY_URL)
     self.ggu = GerritGitUtils(config.ADMIN_USER,
                               config.ADMIN_PRIV_KEY_PATH,
                               config.USERS[config.ADMIN_USER]['email'])
     self.ju = JenkinsUtils()
     self.rm = RedmineUtils(
         config.REDMINE_URL,
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
    def test_upstream(self):
        """ Validate upstream feature of managesf
        """
        # Create a test upstream project
        pname_us = 'p_upstream'
        self.create_project(pname_us, config.ADMIN_USER)

        ggu_us = GerritGitUtils(config.ADMIN_USER,
                                config.ADMIN_PRIV_KEY_PATH,
                                config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname_us)
        # clone
        us_clone_dir = ggu_us.clone(url, pname_us)
        self.dirs_to_delete.append(os.path.dirname(us_clone_dir))
        # Test that the clone is a success
        self.assertTrue(os.path.isdir(us_clone_dir))
        # push some test files to the upstream project
        us_files = [str(x) for x in range(1, 10)]
        for f in us_files:
            file(os.path.join(us_clone_dir, f), 'w').write(f)
            os.chmod(os.path.join(us_clone_dir, f), 0755)

        ggu_us.add_commit_in_branch(us_clone_dir, "master",
                                    commit="Adding files 1-10",
                                    files=us_files)
        ggu_us.direct_push_branch(us_clone_dir, "master")

        # Now create a test project with upstream pointing to the above
        upstream_url = "ssh://%s@%s:29418/%s" % (
            config.ADMIN_USER, config.GATEWAY_HOST, pname_us)
        pname = 'p_%s' % create_random_str()
        # create the project as admin
        options = {"upstream": upstream_url,
                   "upstream-ssh-key": config.ADMIN_PRIV_KEY_PATH}
        self.create_project(pname, config.ADMIN_USER, options=options)

        ggu = GerritGitUtils(config.ADMIN_USER,
                             config.ADMIN_PRIV_KEY_PATH,
                             config.USERS[config.ADMIN_USER]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        # clone
        clone_dir = ggu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        # Check that the files pushed to the upstream project are present
        files = [f for f in os.listdir(clone_dir) if not f.startswith('.')]
        self.assertEqual(set(files), set(us_files))
Example 35
    def _prepare_review_submit_testing(self, project_options=None):
        if project_options is None:
            u2mail = config.USERS[config.USER_2]['email']
            project_options = {'core-group': u2mail}
        pname = 'p_%s' % create_random_str()
        self.create_project(pname, project_options)
        un = config.ADMIN_USER
        gu = GerritUtils(config.GATEWAY_URL,
                         auth_cookie=config.USERS[un]['auth_cookie'])
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un, priv_key_path, config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST, pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]

        return change_id, gu, k_index
Example 36
 def __init__(self):
     with open("%s/resources.yaml" % pwd, 'r') as rsc:
         self.resources = yaml.load(rsc)
     config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
         config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
     self.gu = GerritUtils(
         'http://%s/' % config.GATEWAY_HOST,
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
     self.ggu = GerritGitUtils(config.ADMIN_USER,
                               config.ADMIN_PRIV_KEY_PATH,
                               config.USERS[config.ADMIN_USER]['email'])
     self.ju = JenkinsUtils()
     self.rm = RedmineUtils(
         config.GATEWAY_URL + "/redmine/",
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
 def setUp(self):
     self.projects = []
     self.dirs_to_delete = []
     self.issues = []
     self.u = config.ADMIN_USER
     self.u2 = config.USER_2
     self.rm = get_issue_tracker_utils(
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
     self.gu = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[self.u]['auth_cookie'])
     self.gu2 = GerritUtils(
         config.GATEWAY_URL,
         auth_cookie=config.USERS[self.u2]['auth_cookie'])
     self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
     priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
     self.gitu = GerritGitUtils(self.u,
                                priv_key_path,
                                config.USERS[self.u]['email'])
 def setUp(self):
     self.projects = []
     self.dirs_to_delete = []
     self.issues = []
     self.u = config.ADMIN_USER
     self.u2 = config.USER_2
     self.rm = RedmineUtils(
         config.REDMINE_URL,
         auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
     self.gu = GerritUtils(
         'https://%s/' % config.GATEWAY_HOST,
         auth_cookie=config.USERS[self.u]['auth_cookie'])
     self.gu2 = GerritUtils(
         'https://%s/' % config.GATEWAY_HOST,
         auth_cookie=config.USERS[self.u2]['auth_cookie'])
     self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
     priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
     self.gitu = GerritGitUtils(self.u,
                                priv_key_path,
                                config.USERS[self.u]['email'])
    def setUp(self):
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])

        # Prepare environment for git clone on mirror repo
        self.mt = Tool()
        self.mt_tempdir = tempfile.mkdtemp()
        # Copy the service private key in a flat file
        priv_key = file(config.SERVICE_PRIV_KEY_PATH, 'r').read()
        priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
        file(priv_key_path, 'w').write(priv_key)
        os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
        # Prepare the ssh wrapper script
        ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
            priv_key_path)
        wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
        file(wrapper_path, 'w').write(ssh_wrapper)
        os.chmod(wrapper_path, stat.S_IRWXU)
        # Set the wrapper as GIT_SSH env variable
        self.mt.env['GIT_SSH'] = wrapper_path

        self.config_clone_dir = None

        # Project we are going to configure the replication for
        self.pname = 'test/replication'

        # Remove artifacts of previous run if any
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)
Example 40
class SFchecker:
    """ This checker is only intended for testing
    SF backup/restore and update. It checks that the user
    data defined in resources.yaml are present on the SF.

    Those data must have been provisioned by SFProvisioner.
    """
    def __init__(self):
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.gu = GerritUtils(
            'http://%s/' % config.GATEWAY_HOST,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def check_project(self, name):
        print " Check project %s exists ..." % name,
        if not self.gu.project_exists(name) or \
           not self.rm.project_exists(name):
            print "FAIL"
            exit(1)
        print "OK"

    def check_files_in_project(self, name, files):
        print " Check files (%s) exist in project ..." % ",".join(files),
        # TODO(fbo): use gateway host instead of gerrit host
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(url, name, config_review=False)
        for f in files:
            if not os.path.isfile(os.path.join(clone_dir, f)):
                print "FAIL"
                exit(1)

    def check_issues_on_project(self, name, issues):
        print " Check that at least %s issues exist for that project ..." %\
            len(issues),
        current_issues = self.rm.get_issues_by_project(name)
        if len(current_issues) < len(issues):
            print "FAIL: expected %s, project has %s" % (
                len(issues), len(current_issues))
            exit(1)
        print "OK"

    def check_jenkins_jobs(self, name, jobnames):
        print " Check that jenkins jobs (%s) exist ..." % ",".join(jobnames),
        for jobname in jobnames:
            if not '%s_%s' % (name, jobname) in self.ju.list_jobs():
                print "FAIL"
                exit(1)
        print "OK"

    def check_reviews_on_project(self, name, issues):
        reviews = [i for i in issues if i['review']]
        print " Check that at least %s reviews exist for that project ..." %\
            len(reviews),
        pending_reviews = self.ggu.list_open_reviews(name, config.GATEWAY_HOST)
        if not len(pending_reviews) >= len(reviews):
            print "FAIL"
            exit(1)
        print "OK"

    def check_pads(self, amount):
        pass

    def check_pasties(self, amount):
        pass

    def checker(self):
        for project in self.resources['projects']:
            print "Check user data for %s" % project['name']
            self.check_project(project['name'])
            self.check_files_in_project(project['name'],
                                        [f['name'] for f in project['files']])
            self.check_issues_on_project(project['name'], project['issues'])
            self.check_reviews_on_project(project['name'], project['issues'])
            self.check_jenkins_jobs(project['name'],
                                    [j['name'] for j in project['jobnames']])
        self.check_pads(2)
        self.check_pasties(2)
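
A minimal usage sketch (an assumption, not shown in the example above): the checker is presumably invoked as a standalone script once SFProvisioner has populated the instance, e.g.:

if __name__ == '__main__':
    # Instantiate and run all the per-project checks defined above.
    checker = SFchecker()
    checker.checker()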
Example 41
class TestResourcesWorkflow(Base):
    def setUp(self):
        super(TestResourcesWorkflow, self).setUp()
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        self.gitu_admin = GerritGitUtils(
            config.ADMIN_USER, priv_key_path,
            config.USERS[config.ADMIN_USER]['email'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ju = JenkinsUtils()

        self.dirs_to_delete = []

    def tearDown(self):
        super(TestResourcesWorkflow, self).tearDown()
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
        host = '%s@%s' % (user, host)
        sshcmd = [
            'ssh', '-o', 'LogLevel=ERROR', '-o', 'StrictHostKeyChecking=no',
            '-o', 'UserKnownHostsFile=/dev/null', '-i', sshkey_priv_path, host
        ]
        cmd = sshcmd + subcmd

        devnull = open(os.devnull, 'wb')
        p = Popen(cmd, stdout=devnull, stderr=devnull)
        return p.communicate(), p.returncode

    def clone_as_admin(self, pname):
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST,
                                        pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def set_resources_then_direct_push(self,
                                       fpath,
                                       resources=None,
                                       mode='add'):
        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            file(path, 'w').write(resources)
        elif mode == 'del':
            os.unlink(path)
        change_sha = self.commit_direct_push_as_admin(
            config_clone_dir, "Add new resources for functional tests")
        config_update_log = self.ju.wait_for_config_update(change_sha)
        self.assertIn("SUCCESS", config_update_log)

    def wait_for_jenkins_note(self, change_id):
        attempt = 0
        while "jenkins" not in self.gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1

    def propose_resources_change_check_ci(self,
                                          fpath,
                                          resources=None,
                                          mode='add',
                                          expected_note=1,
                                          msg=None):
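        # Helper: propose a resources file change through the review
        # workflow and assert the Verified note left by the CI
        # (expected_note is -1 when config-check rejects the model,
        # 1 when it validates).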

        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            file(path, 'w').write(resources)
        elif mode == 'del':
            os.unlink(path)

        if not msg:
            msg = "Validate resources"
        if mode == 'add':
            change_sha = self.gitu_admin.add_commit_and_publish(
                config_clone_dir, "master", msg, fnames=[path])
        if mode == 'del':
            change_sha = self.gitu_admin.add_commit_for_all_new_additions(
                config_clone_dir, msg, publish=True)

        change_nr = self.gu.get_change_number(change_sha)
        note = self.gu.wait_for_verify(change_nr)
        self.assertEqual(note, expected_note)

    def get_resources(self):
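        # As used by the assertions in test_CRUD_resources, the resources
        # endpoint is assumed to return a payload shaped roughly like
        # (illustrative, not an authoritative API description):
        #   {"resources": {"projects": {...}, "acls": {...},
        #                  "groups": {...}, "repos": {...}}}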
        gateau = config.USERS[config.ADMIN_USER]['auth_cookie']
        resp = requests.get("%s/manage/resources/" % config.GATEWAY_URL,
                            cookies={'auth_pubtkt': gateau})
        return resp.json()

    def test_validate_wrong_resource_workflow(self):
        """ Check resources - wrong model is detected by config-check """
        # This resource is not correct
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      unknown-key: value
      description: test for functional test
"""
        # Add the resource file with review then check CI
        resources = resources % name
        self.propose_resources_change_check_ci(fpath,
                                               resources=resources,
                                               mode='add',
                                               expected_note=-1)

    def test_validate_correct_resource_workflow(self):
        """ Check resources - good model is detected by config-check """
        # This resource is correct
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
"""
        # Add the resource file with review then check CI
        resources = resources % name
        self.propose_resources_change_check_ci(fpath,
                                               resources=resources,
                                               mode='add')

    def test_validate_resources_deletion(self):
        """ Check resources - deletions detected and authorized via flag """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members: []
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')

        # Remove the resource file via the review
        self.propose_resources_change_check_ci(fpath,
                                               mode='del',
                                               expected_note=-1)

        # Remove the resource file with "allow-delete" flag via the review
        shutil.rmtree(os.path.join(self.gitu_admin.tempdir, 'config'))
        msg = "Remove resource with flag\nsf-resources: allow-delete"
        self.propose_resources_change_check_ci(fpath, mode='del', msg=msg)

    @skipIfServiceMissing('storyboard')
    def test_CUD_project(self):
        """ Check resources - ops on project work as expected """
        sclient = SFStoryboard(config.GATEWAY_URL + "/storyboard_api",
                               config.USERS[config.USER_4]['auth_cookie'])
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      issue-tracker: SFStoryboard
      source-repositories:
        - %(pname)s/%(r1name)s
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
"""
        tmpl_keys = {
            'pname': create_random_str(),
            'r1name': create_random_str()
        }

        resources = resources % tmpl_keys
        # Add the resources file w/o review
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')

        # Some checks to validate the resources have been created
        projects = [p.name for p in sclient.projects.get_all()]
        self.assertIn("%s/%s" % (tmpl_keys['pname'], tmpl_keys['r1name']),
                      projects)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertIn(tmpl_keys['pname'], project_groups)

        # Modify the project resource
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      issue-tracker: SFStoryboard
      source-repositories:
        - %(pname)s/%(r1name)s
        - %(pname)s/%(r2name)s
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(pname)s
    %(pname)s/%(r2name)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
"""
        tmpl_keys.update({'r2name': create_random_str()})
        resources = resources % tmpl_keys
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Some checks to validate the resources have been updated
        projects = [p.name for p in sclient.projects.get_all()]
        for name in (tmpl_keys['r1name'], tmpl_keys['r2name']):
            self.assertIn("%s/%s" % (tmpl_keys['pname'], name), projects)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertIn(tmpl_keys['pname'], project_groups)

        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')

        # Check the project group has been deleted
        # Note the project (in storyboard) is not deleted
        # this is a current limitation of the API (01/13/2017)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertFalse(tmpl_keys['pname'] in project_groups)

    def test_CUD_group(self):
        """ Check resources - ops on group work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
        - [email protected]
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check members on Gerrit
        gid = self.gu.get_group_id(name)
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        # Modify resources Add/Remove members w/o review
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
        - [email protected]
"""
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check members on Gerrit
        gid = self.gu.get_group_id(name)
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        self.assertNotIn("*****@*****.**", members)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the group has been deleted
        self.assertFalse(self.gu.get_group_id(name))

    def test_CD_repo(self):
        """ Check resources - ops on git repositories work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  repos:
    %s:
      description: test for functional test
      default-branch: br1
      branches:
        br1: HEAD
        br2: HEAD
        master: '0'
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check the project has been created
        self.assertTrue(self.gu.project_exists(name))
        # Check the branches exist and HEAD points to the default branch (br1)
        branches = self.gu.g.get('/projects/%s/branches/' % name)
        for wref in ("HEAD", "br1", "br2"):
            found = False
            for ref in branches:
                if found:
                    continue
                if ref['ref'].endswith(wref):
                    found = True
                    if ref['ref'] == 'HEAD' and ref['revision'] != "br1":
                        raise Exception("Wrong default branch")
            if not found:
                raise Exception("Requested branch %s not found" % wref)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the project has been deleted
        self.assertFalse(self.gu.project_exists(name))

    def test_CRUD_resources(self):
        """ Check resources - bulk ops on resources work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        tmpl_keys = {
            'pname': create_random_str(),
            'r1name': create_random_str(),
            'r2name': create_random_str(),
            'aname': create_random_str(),
            'g1name': create_random_str(),
            'g2name': create_random_str()
        }
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      contacts:
        - [email protected]
      source-repositories:
        - %(pname)s/%(r1name)s
        - %(pname)s/%(r2name)s
      website: http://ichiban-cloud.io
      documentation: http://ichiban-cloud.io/docs
      issue-tracker-url: http://ichiban-cloud.bugtrackers.io
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(aname)s
    %(pname)s/%(r2name)s:
      description: The client part
      acl: %(aname)s
  acls:
    %(aname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
          read = group %(pname)s/%(g2name)s
          owner = group %(pname)s/%(g1name)s
        [access "refs/heads/*"]
          label-Code-Review = -2..+2 group %(pname)s/%(g2name)s
          label-Verified = -2..+2 group %(pname)s/%(g1name)s
          label-Workflow = -1..+1 group %(pname)s/%(g2name)s
          submit = group %(pname)s/%(g2name)s
          read = group Anonymous Users
          read = group %(pname)s/%(g2name)s
        [access "refs/meta/config"]
          read = group %(pname)s/%(g2name)s
        [receive]
          requireChangeId = true
        [submit]
          mergeContent = false
          action = fast forward only
      groups:
        - %(pname)s/%(g1name)s
        - %(pname)s/%(g2name)s
  groups:
    %(pname)s/%(g1name)s:
      members:
        - [email protected]
    %(pname)s/%(g2name)s:
      members:
        - [email protected]
        - [email protected]
"""
        # Add the resources file w/o review
        resources = resources % tmpl_keys
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check resources have been created
        self.assertTrue(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r1name'])))
        self.assertTrue(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r2name'])))
        gid = self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']))
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertEqual(len(members), 1)
        self.assertIn("*****@*****.**", members)
        gid2 = self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g2name']))
        members = [m['email'] for m in self.gu.get_group_members(gid2)]
        self.assertEqual(len(members), 2)
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        # Verify ACLs have been written for both repos
        for r in ('r1name', 'r2name'):
            rname = os.path.join(tmpl_keys['pname'], tmpl_keys[r])
            acl = self.gu.g.get('access/?project=%s' % rname)
            self.assertIn(
                gid2, acl[rname]['local']['refs/heads/*']['permissions']
                ['submit']['rules'].keys())
        # Verify the resources endpoint knows about what we pushed
        res = self.get_resources()
        self.assertIn(tmpl_keys['pname'], res['resources']['projects'].keys())
        self.assertIn(tmpl_keys['aname'], res['resources']['acls'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']),
                      res['resources']['groups'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g2name']),
                      res['resources']['groups'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r1name']),
                      res['resources']['repos'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r2name']),
                      res['resources']['repos'].keys())
        # Modify the ACL to verify repos ACL are updated
        resources = re.sub(
            'submit = group .*', 'submit = group %s' %
            os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']), resources)
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Verify ACLs have been updated for both repos
        for r in ('r1name', 'r2name'):
            rname = os.path.join(tmpl_keys['pname'], tmpl_keys[r])
            acl = self.gu.g.get('access/?project=%s' % rname)
            self.assertIn(
                gid, acl[rname]['local']['refs/heads/*']['permissions']
                ['submit']['rules'].keys())
        # Now let's remove all those resources
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check resources have been deleted
        self.assertFalse(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r1name'])))
        self.assertFalse(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r2name'])))
        self.assertFalse(
            self.gu.get_group_id(
                os.path.join(tmpl_keys['pname'], tmpl_keys['g1name'])))
        self.assertFalse(
            self.gu.get_group_id(
                os.path.join(tmpl_keys['pname'], tmpl_keys['g2name'])))
        res = self.get_resources()
        projects = res['resources'].get('projects', {})
        acls = res['resources'].get('acls', {})
        groups = res['resources'].get('groups', {})
        repos = res['resources'].get('repos', {})
        self.assertNotIn(tmpl_keys['pname'], projects.keys())
        self.assertNotIn(tmpl_keys['aname'], acls.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']),
                         groups.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g2name']),
                         groups.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r1name']),
                         repos.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r2name']),
                         repos.keys())

    def test_GET_resources(self):
        """ Check resources - GET resources works as expected"""
        cookies = dict(auth_pubtkt=config.USERS[config.USER_1]['auth_cookie'])
        ret = requests.get("%s/manage/resources/" % config.GATEWAY_URL,
                           cookies=cookies)
        self.assertIn('resources', ret.json())

    def test_GET_missing_resources(self):
        """ Check resources - GET missing resources works as expected"""
        token = config.USERS[config.ADMIN_USER]['auth_cookie']
        prev = "resources: {}"
        new = """resources:
  groups:
    %(gname)s:
      description: A test group
      members: ['*****@*****.**']
"""
        group_name = create_random_str()
        data = {'prev': prev, 'new': new % {'gname': group_name}}
        # Direct PUT resources bypassing the config repo workflow
        requests.put("%s/manage/resources/" % config.GATEWAY_URL,
                     json=data,
                     cookies={'auth_pubtkt': token})
        # Verify managesf detects the diff and proposes a re-sync resource struct
        ret = requests.get("%s/manage/resources/?get_missing_"
                           "resources=true" % config.GATEWAY_URL,
                           cookies={'auth_pubtkt': token})
        logs, resources = ret.json()
        self.assertListEqual(logs, [])
        self.assertIn(group_name, resources['resources']['groups'])
        # Call the resources.sh script on managesf node to propose
        # a review on the config repo to re-sync with the reality
        cmd = [
            '/usr/local/bin/resources.sh', 'get_missing_resources', 'submit'
        ]
        self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                         config.GATEWAY_HOST, cmd)
        # Get change id of the submitted review
        search_string = "Propose missing resources to the config repo"
        r = requests.get('%s/r/changes/?q=%s' %
                         (config.GATEWAY_URL, search_string))
        lastid = 0
        for r in json.loads(r.content[4:]):
            if r['_number'] > lastid:
                lastid = r['_number']
        self.assertEqual(self.gu.wait_for_verify(lastid), 1)
        # Check flag "sf-resources: skip-apply" in the commit msg
        change = self.gu.g.get(
            'changes/?q=%s&o=CURRENT_REVISION&o=CURRENT_COMMIT' % lastid)[0]
        revision = change["current_revision"]
        commit = change['revisions'][revision]["commit"]
        self.assertEqual(commit["message"].split('\n')[0],
                         'Propose missing resources to the config repo')
        self.assertTrue(commit["message"].find('sf-resources: skip-apply') > 0)
        # Approve the change and wait for the +2
        self.gu.submit_change_note(change['id'], "current", "Code-Review", "2")
        self.gu.submit_change_note(change['id'], "current", "Workflow", "1")
        # Check config-update returns a success.
        # The "sf-resources: skip-apply" flag should be detected by
        # config-update; the missing resources won't be considered new
        # and the resources apply step will be skipped.
        # Checking that config-update succeeds confirms the apply was
        # skipped: otherwise the managesf resources apply would have
        # returned a 409 error, making config-update fail too.
        config_update_log = self.ju.wait_for_config_update(revision)
        self.assertIn("Skip resources apply.", config_update_log)
        self.assertIn("SUCCESS", config_update_log)
        # Checking missing resources again must return nothing
        ret = requests.get("%s/manage/resources/?get_missing_"
                           "resources=true" % config.GATEWAY_URL,
                           cookies={'auth_pubtkt': token})
        logs, resources = ret.json()
        self.assertListEqual(logs, [])
        self.assertEqual(len(resources['resources']), 0)
class SFchecker:
    """ This checker is only intended for testin
    SF backup/restore and update. It checks that the user
    data defined in resourses.yaml are present on the SF.

    Those data must have been provisioned by SFProvisioner.
    """
    def __init__(self):
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.gu = GerritUtils(
            'http://%s/' % config.GATEWAY_HOST,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def check_project(self, name):
        print " Check project %s exists ..." % name,
        if not self.gu.project_exists(name) or \
           (is_present('SFRedmine') and not self.rm.project_exists(name)):
            print "FAIL"
            exit(1)
        print "OK"

    def check_files_in_project(self, name, files):
        print " Check files(%s) exists in project ..." % ",".join(files),
        # TODO(fbo); use gateway host instead of gerrit host
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST,
                                        name)
        clone_dir = self.ggu.clone(url, name, config_review=False)
        for f in files:
            if not os.path.isfile(os.path.join(clone_dir, f)):
                print "FAIL"
                exit(1)
        print "OK"

    def check_issues_on_project(self, name, issues):
        print " Check that at least %s issues exists for that project ...," %\
            len(issues)
        current_issues = self.rm.get_issues_by_project(name)
        if len(current_issues) < len(issues):
            print "FAIL: expected %s, project has %s" % (len(issues),
                                                         len(current_issues))
            exit(1)
        print "OK"

    def check_jenkins_jobs(self, name, jobnames):
        print " Check that jenkins jobs(%s) exists ..." % ",".join(jobnames),
        for jobname in jobnames:
            if not '%s_%s' % (name, jobname) in self.ju.list_jobs():
                print "FAIL"
                exit(1)
        print "OK"

    def check_reviews_on_project(self, name, issues):
        reviews = [i for i in issues if i['review']]
        print " Check that at least %s reviews exists for that project ..." %\
            len(reviews),
        pending_reviews = self.ggu.list_open_reviews(name, config.GATEWAY_HOST)
        if not len(pending_reviews) >= len(reviews):
            print "FAIL"
            exit(1)
        print "OK"

    def check_pads(self, amount):
        pass

    def check_pasties(self, amount):
        pass

    def checker(self):
        for project in self.resources['projects']:
            print "Check user datas for %s" % project['name']
            self.check_project(project['name'])
            self.check_files_in_project(project['name'],
                                        [f['name'] for f in project['files']])
            if is_present('SFRedmine'):
                self.check_issues_on_project(project['name'],
                                             project['issues'])
            self.check_reviews_on_project(project['name'], project['issues'])
            self.check_jenkins_jobs(project['name'],
                                    [j['name'] for j in project['jobnames']])
        self.check_pads(2)
        self.check_pasties(2)
class TestGerritHooks(Base):
    """ Functional tests that validate Gerrit hooks.
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.projects = []
        self.dirs_to_delete = []
        self.issues = []
        self.u = config.ADMIN_USER
        self.u2 = config.USER_2
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.u]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.u2]['auth_cookie'])
        self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
        self.gitu = GerritGitUtils(self.u,
                                   priv_key_path,
                                   config.USERS[self.u]['email'])

    def tearDown(self):
        for issue in self.issues:
            self.rm.delete_issue(issue)
        for name in self.projects:
            self.msu.deleteProject(name, self.u)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def create_project(self, name, user,
                       options=None):
        self.msu.createProject(name, user,
                               options)
        self.projects.append(name)

    def _test_update_issue_hooks(self, comment_template, status):
        """ A referenced issue in commit msg triggers the hook
        """
        pname = 'p_%s' % create_random_str()

        # Be sure the project does not exist
        self.msu.deleteProject(pname, self.u)

        # Create the project
        self.create_project(pname, self.u)
        # Put USER_2 as core for the project
        self.gu.add_group_member(self.u2, "%s-core" % pname)

        # Create an issue on the project
        issue_id = self.rm.create_issue(pname, "There is a problem")

        # Clone and commit something
        url = "ssh://%s@%s:29418/%s" % (self.u, config.GATEWAY_HOST,
                                        pname)
        clone_dir = self.gitu.clone(url, pname)
        cmt_msg = comment_template % issue_id
        self.gitu.add_commit_and_publish(clone_dir, 'master', cmt_msg)

        # Check issue status (Gerrit hook updates the issue to in progress)
        attempt = 0
        while True:
            if self.rm.test_issue_status(issue_id, 'In Progress'):
                break
            if attempt > 10:
                break
            time.sleep(1)
            attempt += 1
        self.assertTrue(self.rm.test_issue_status(issue_id, 'In Progress'))
        self._test_merging(pname, issue_id, status)

    def _test_merging(self, pname, issue_id, status):
        # Get the change id and merge the patch
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        self.gu.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu.submit_change_note(change_id, "current", "Workflow", "1")
        self.gu.submit_change_note(change_id, "current", "Verified", "2")
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.assertTrue(self.gu.submit_patch(change_id, "current"))

        # Check issue status (Gerrit hook updates the issue to the expected status)
        attempt = 0
        while True:
            if self.rm.test_issue_status(issue_id, status):
                break
            if attempt > 10:
                break
            time.sleep(1)
            attempt += 1
        self.assertTrue(self.rm.test_issue_status(issue_id, status))

    def test_gerrit_hook(self):
        """test various commit messages triggering a hook"""
        for template, final_status in TEST_MSGS:
            self._test_update_issue_hooks(template, final_status)
class TestProjectReplication(Base):
    """ Functional tests to verify the gerrit replication feature
    """
    def setUp(self):
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])

        # Prepare environment for git clone on mirror repo
        self.mt = Tool()
        self.mt_tempdir = tempfile.mkdtemp()
        # Copy the service private key in a flat file
        priv_key = file(config.SERVICE_PRIV_KEY_PATH, 'r').read()
        priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
        file(priv_key_path, 'w').write(priv_key)
        os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
        # Prepare the ssh wrapper script
        ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
            priv_key_path)
        wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
        file(wrapper_path, 'w').write(ssh_wrapper)
        os.chmod(wrapper_path, stat.S_IRWXU)
        # Set the wrapper as GIT_SSH env variable
        self.mt.env['GIT_SSH'] = wrapper_path

        self.config_clone_dir = None

        # Project we are going to configure the replication for
        self.pname = 'test/replication'

        # Remove artifacts of previous run if any
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)

    def tearDown(self):
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)
        self.msu.deleteProject(self.pname, self.un)
        self.gu2.del_pubkey(self.k_idx)

    def clone(self, uri, target):
        self.assertTrue(uri.startswith('ssh://'))
        cmd = "git clone %s %s" % (uri, target)
        clone = os.path.join(self.mt_tempdir, target)
        if os.path.isdir(clone):
            shutil.rmtree(clone)
        self.mt.exe(cmd, self.mt_tempdir)
        return clone

    def create_project(self, name, user, options=None):
        self.msu.createProject(name, user, options)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
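        # Run subcmd on the host over ssh with the given private key;
        # returns ((stdout, stderr), returncode), which callers unpack
        # as: out, code = self.ssh_run_cmd(...)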
        host = '%s@%s' % (user, host)
        sshcmd = ['ssh', '-o', 'LogLevel=ERROR',
                  '-o', 'StrictHostKeyChecking=no',
                  '-o', 'UserKnownHostsFile=/dev/null', '-i',
                  sshkey_priv_path, host]
        cmd = sshcmd + subcmd

        p = Popen(cmd, stdout=PIPE)
        return p.communicate(), p.returncode

    def delete_mirror_repo(self, name):
        mirror_path = '/home/gerrit/git/%s.git' % name
        cmd = ['ssh', 'gerrit.%s' % config.GATEWAY_HOST,
               'rm', '-rf', mirror_path]
        self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                         'root',
                         config.GATEWAY_HOST, cmd)

    def create_config_section(self, project):
        host = '%s@%s' % (config.GERRIT_USER, config.GATEWAY_HOST)
        mirror_repo_path = '/home/gerrit/git/\${name}.git'
        url = '%s:%s' % (host, mirror_repo_path)
        path = os.path.join(self.config_clone_dir,
                            'gerrit/replication.config')
        call("git config -f %s --remove-section remote.test_project" %
             path, shell=True)
        call("git config -f %s --add remote.test_project.projects %s" %
             (path, project), shell=True)
        call("git config -f %s --add remote.test_project.url %s" %
             (path, url), shell=True)
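        # The three git config calls above leave a section roughly like
        # this in gerrit/replication.config (illustrative; <project> and
        # <host> stand for the computed values):
        #   [remote "test_project"]
        #       projects = <project>
        #       url = <gerrit user>@<host>:/home/gerrit/git/${name}.git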
        self.gitu_admin.add_commit_for_all_new_additions(
            self.config_clone_dir, "Add replication test section")
        # The direct push will trigger the config-update job
        # as we commit through 29418
        self.gitu_admin.direct_push_branch(self.config_clone_dir, 'master')
        attempts = 0
        cmd = ['ssh', 'gerrit.%s' % config.GATEWAY_HOST, 'grep',
               'test_project', '/home/gerrit/site_path/etc/replication.config']
        while attempts < 30:
            out, code = self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                                         'root',
                                         config.GATEWAY_HOST, cmd)
            if code == 0:
                return
            attempts += 1
            time.sleep(2)
        raise Exception('replication.config file has not been updated (add)')

    def delete_config_section(self, user, project):
        url = "ssh://%s@%s:29418/config" % (self.un, config.GATEWAY_HOST)
        self.config_clone_dir = self.gitu_admin.clone(
            url, 'config', config_review=True)
        path = os.path.join(self.config_clone_dir, 'gerrit/replication.config')
        call("git config -f %s --remove-section remote.test_project" %
             path, shell=True)
        try:
            self.gitu_admin.add_commit_for_all_new_additions(
                self.config_clone_dir, "Remove replication test section")
        except CalledProcessError:
            # The commit fails if there is nothing to remove
            return
        # The direct push will trigger the config-update job
        # as we commit through 29418
        self.gitu_admin.direct_push_branch(self.config_clone_dir, 'master')
        attempts = 0
        cmd = ['ssh', 'gerrit.%s' % config.GATEWAY_HOST, 'grep',
               'test_project',
               '/home/gerrit/site_path/etc/replication.config']
        while attempts < 30:
            out, code = self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                                         'root',
                                         config.GATEWAY_HOST, cmd)
            if code != 0:
                return
            attempts += 1
            time.sleep(2)
        raise Exception('replication.config has not been updated (rm)')

    def mirror_clone_and_check_files(self, url, pname):
        retries = 0
        while True:
            clone = self.clone(url, pname)
            # The clone may fail, as the mirror repo is not ready yet
            # (i.e. gerrit has not replicated the project yet)
            if os.path.isdir(clone) and \
               os.path.isfile(os.path.join(clone, '.gitreview')):
                return True
            elif retries > 50:
                break
            else:
                time.sleep(3)
                retries += 1
        return False

    def test_replication(self):
        """ Test gerrit replication for review process
        """
        # Create the project
        self.create_project(self.pname, self.un)

        # Be sure sftests.com host key is inside the known_hosts
        cmds = [['ssh', 'gerrit.%s' % config.GATEWAY_HOST,
                 'ssh-keyscan', 'sftests.com', '>',
                 '/home/gerrit/.ssh/known_hosts']]
        for cmd in cmds:
            self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                             'root',
                             config.GATEWAY_HOST, cmd)

        # Create new section for this project in replication.config
        self.create_config_section(self.pname)

        # Verify if gerrit replicated the repo
        self.managesf_repo_path = "ssh://%s@%s/home/gerrit/git/" % (
            'root', config.GATEWAY_HOST)
        repo_url = self.managesf_repo_path + '%s.git' % self.pname
        self.assertTrue(self.mirror_clone_and_check_files(repo_url,
                                                          self.pname))
    def test_check_zuul_operations(self):
        """ Test if zuul verifies project correctly through zuul-demo project
        """
        # zuul-demo - test project used exclusively to test zuul installation
        # The necessary project descriptions are already declared in Jenkins
        # and zuul

        pname = 'demo/zuul-demo'

        self.create_project(pname, config.ADMIN_USER)
        un = config.ADMIN_USER
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[un]['auth_cookie'])
        ju = JenkinsUtils()
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        # Gerrit part
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un,
                              priv_key_path,
                              config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))

        last_fail_build_num_ft = \
            ju.get_last_build_number("zuul-demo-functional-tests",
                                     "lastFailedBuild")
        last_fail_build_num_ut = \
            ju.get_last_build_number("zuul-demo-unit-tests",
                                     "lastFailedBuild")
        last_succeed_build_num_ft = \
            ju.get_last_build_number("zuul-demo-functional-tests",
                                     "lastSuccessfulBuild")
        last_succeed_build_num_ut = \
            ju.get_last_build_number("zuul-demo-unit-tests",
                                     "lastSuccessfulBuild")

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        change_ids = gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]

        # Give some time for jenkins to work
        ju.wait_till_job_completes("zuul-demo-functional-tests",
                                   last_fail_build_num_ft, "lastFailedBuild")
        ju.wait_till_job_completes("zuul-demo-unit-tests",
                                   last_fail_build_num_ut, "lastFailedBuild")

        attempt = 0
        while "jenkins" not in gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1

        attempt = 0
        while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
                != '-1':
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1

        self.assertEqual(
            gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'], '-1')

        # Add the test case files and resubmit for review
        data = "echo Working"
        files = ["run_functional-tests.sh", "run_tests.sh"]

        for f in files:
            file(os.path.join(clone_dir, f), 'w').write(data)
            os.chmod(os.path.join(clone_dir, f), 0755)

        gitu.add_commit_and_publish(clone_dir, "master", None, fnames=files)

        # Give some time for jenkins to work
        ju.wait_till_job_completes("zuul-demo-functional-tests",
                                   last_succeed_build_num_ft,
                                   "lastSuccessfulBuild")
        ju.wait_till_job_completes("zuul-demo-unit-tests",
                                   last_succeed_build_num_ut,
                                   "lastSuccessfulBuild")

        attempt = 0
        while "jenkins" not in gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1

        attempt = 0
        while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
                != '+1':
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1

        self.assertEqual(
            gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'], '+1')

        gu.del_pubkey(k_index)
class TestProjectReplication(Base):
    """ Functional tests to verify the gerrit replication feature
    """
    def setUp(self):
        super(TestProjectReplication, self).setUp()
        self.ru = ResourcesUtils()
        self.un = config.ADMIN_USER
        self.ju = JenkinsUtils()
        self.gu = GerritUtils(config.GATEWAY_URL,
                              auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un, priv_key_path,
                                         config.USERS[self.un]['email'])

        # Prepare environment for git clone on mirror repo
        self.mt = Tool()
        self.mt_tempdir = tempfile.mkdtemp()
        # Copy the service private key in a flat file
        priv_key = file(config.SERVICE_PRIV_KEY_PATH, 'r').read()
        priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
        file(priv_key_path, 'w').write(priv_key)
        os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
        # Prepare the ssh wrapper script
        ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
            priv_key_path)
        wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
        file(wrapper_path, 'w').write(ssh_wrapper)
        os.chmod(wrapper_path, stat.S_IRWXU)
        # Set the wrapper as GIT_SSH env variable
        self.mt.env['GIT_SSH'] = wrapper_path

        self.config_clone_dir = None

        # Project we are going to configure the replication for
        self.pname = 'test/replication'

        # Remove artifacts of previous run if any
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)

    def tearDown(self):
        super(TestProjectReplication, self).tearDown()
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)
        self.ru.direct_delete_repo(self.pname)
        self.gu2.del_pubkey(self.k_idx)

    def clone(self, uri, target):
        self.assertTrue(uri.startswith('ssh://'))
        cmd = "git clone %s %s" % (uri, target)
        clone = os.path.join(self.mt_tempdir, target)
        if os.path.isdir(clone):
            shutil.rmtree(clone)
        self.mt.exe(cmd, self.mt_tempdir)
        return clone

    def create_project(self, name):
        logger.info("Create repo to for testing replication %s" % name)
        self.ru.direct_create_repo(name)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
        host = '%s@%s' % (user, host)
        sshcmd = [
            'ssh', '-o', 'LogLevel=ERROR', '-o', 'StrictHostKeyChecking=no',
            '-o', 'UserKnownHostsFile=/dev/null', '-i', sshkey_priv_path, host
        ]
        cmd = sshcmd + subcmd

        p = Popen(cmd, stdout=PIPE)
        return p.communicate(), p.returncode

    def delete_mirror_repo(self, name):
        logger.info("Delete mirror repo created by the replication")
        mirror_path = '/var/lib/gerrit/tmp/%s.git' % name
        cmd = [
            'ssh',
            'gerrit.%s' % config.GATEWAY_HOST, 'rm', '-rf', mirror_path
        ]
        self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                         config.GATEWAY_HOST, cmd)

    def create_config_section(self, project):
        logger.info("Add the replication config section")
        host = '%s@%s' % (config.GERRIT_USER, config.GATEWAY_HOST)
        mirror_repo_path = '/var/lib/gerrit/tmp/\${name}.git'
        url = '%s:%s' % (host, mirror_repo_path)
        path = os.path.join(self.config_clone_dir, 'gerrit/replication.config')
        call("git config -f %s --remove-section remote.test_project" % path,
             shell=True)
        call("git config -f %s --add remote.test_project.projects %s" %
             (path, project),
             shell=True)
        call("git config -f %s --add remote.test_project.url %s" % (path, url),
             shell=True)
        self.gitu_admin.add_commit_for_all_new_additions(
            self.config_clone_dir, "Add replication test section")
        # The direct push will trigger the config-update job
        # as we commit through 29418
        change_sha = self.gitu_admin.direct_push_branch(
            self.config_clone_dir, 'master')
        logger.info("Waiting for config-update on %s" % change_sha)
        self.ju.wait_for_config_update(change_sha)
        cmd = [
            'ssh',
            'gerrit.%s' % config.GATEWAY_HOST, 'grep', 'test_project',
            '/etc/gerrit/replication.config'
        ]
        logger.info("Wait for the replication config section to land")
        _, code = self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                                   config.GATEWAY_HOST, cmd)
        if code == 0:
            return
        raise Exception('replication.config file has not been updated (add)')

    def delete_config_section(self, user, project):
        logger.info("Remove the replication config section")
        url = "ssh://%s@%s:29418/config" % (self.un, config.GATEWAY_HOST)
        self.config_clone_dir = self.gitu_admin.clone(url,
                                                      'config',
                                                      config_review=True)
        sha = open("%s/.git/refs/heads/master" %
                   self.config_clone_dir).read().strip()
        path = os.path.join(self.config_clone_dir, 'gerrit/replication.config')
        call("git config -f %s --remove-section remote.test_project" % path,
             shell=True)
        change_sha = self.gitu_admin.add_commit_for_all_new_additions(
            self.config_clone_dir, "Remove replication test section")
        # The direct push will trigger the config-update job
        # as we commit through 29418
        if change_sha == sha:
            # Nothing has been changed, nothing to publish
            return
        change_sha = self.gitu_admin.direct_push_branch(
            self.config_clone_dir, 'master')
        logger.info("Waiting for config-update on %s" % change_sha)
        self.ju.wait_for_config_update(change_sha)
        cmd = [
            'ssh',
            'gerrit.%s' % config.GATEWAY_HOST, 'grep', 'test_project',
            '/etc/gerrit/replication.config'
        ]
        _, code = self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                                   config.GATEWAY_HOST, cmd)
        if code != 0:
            return
        raise Exception('replication.config has not been updated (rm)')

    def mirror_clone_and_check_files(self, url, pname):
        for retry in xrange(50):
            clone = self.clone(url, pname)
            # The clone may fail, as the mirror repo is not ready yet
            # (i.e. gerrit has not replicated the project yet)
            if os.path.isdir(clone):
                logger.info("Files in the mirror repo: %s" % os.listdir(clone))
            if os.path.isdir(clone) and \
               os.path.isfile(os.path.join(clone, '.gitreview')):
                break
            else:
                time.sleep(3)
        self.assertTrue(os.path.exists(os.path.join(clone, '.gitreview')))

    def test_replication(self):
        """ Test gerrit replication for review process
        """
        # Create the project
        self.create_project(self.pname)

        # Be sure sftests.com host key is inside the known_hosts
        cmds = [[
            'ssh',
            'gerrit.%s' % config.GATEWAY_HOST, 'ssh-keyscan', 'sftests.com',
            '>', '/var/lib/gerrit/.ssh/known_hosts'
        ]]
        for cmd in cmds:
            self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                             config.GATEWAY_HOST, cmd)

        # Create new section for this project in replication.config
        self.create_config_section(self.pname)

        # Verify if gerrit replicated the repo
        self.managesf_repo_path = "ssh://%s@%s/var/lib/gerrit/tmp/" % (
            'root', config.GATEWAY_HOST)
        repo_url = self.managesf_repo_path + '%s.git' % self.pname
        logger.info("Wait for the replication to happen")
        self.mirror_clone_and_check_files(repo_url, self.pname)
class TestGerritHooks(Base):
    """ Functional tests that validate Gerrit hooks.
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.projects = []
        self.dirs_to_delete = []
        self.issues = []
        self.u = config.ADMIN_USER
        self.u2 = config.USER_2
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.gu = GerritUtils(config.GATEWAY_URL,
                              auth_cookie=config.USERS[self.u]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.u2]['auth_cookie'])
        self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
        self.gitu = GerritGitUtils(self.u, priv_key_path,
                                   config.USERS[self.u]['email'])

    def tearDown(self):
        for issue in self.issues:
            self.rm.delete_issue(issue)
        for name in self.projects:
            self.msu.deleteProject(name, self.u)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def create_project(self, name, user, options=None):
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def _test_update_issue_hooks(self, comment_template, status):
        """ A referenced issue in commit msg triggers the hook
        """
        pname = 'p_%s' % create_random_str()

        # Be sure the project does not exist
        self.msu.deleteProject(pname, self.u)

        # Create the project
        self.create_project(pname, self.u)
        # Put USER_2 as core for the project
        self.gu.add_group_member(self.u2, "%s-core" % pname)

        # Create an issue on the project
        issue_id = self.rm.create_issue(pname, "There is a problem")

        # Clone and commit something
        url = "ssh://%s@%s:29418/%s" % (self.u, config.GATEWAY_HOST, pname)
        clone_dir = self.gitu.clone(url, pname)
        cmt_msg = comment_template % issue_id
        self.gitu.add_commit_and_publish(clone_dir, 'master', cmt_msg)

        # Check issue status (Gerrit hook updates the issue to in progress)
        attempt = 0
        while True:
            if self.rm.test_issue_status(issue_id, 'In Progress'):
                break
            if attempt > 10:
                break
            time.sleep(1)
            attempt += 1
        self.assertTrue(self.rm.test_issue_status(issue_id, 'In Progress'))
        self._test_merging(pname, issue_id, status)

    def _test_merging(self, pname, issue_id, status):
        # Get the change id and merge the patch
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        self.gu.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu.submit_change_note(change_id, "current", "Workflow", "1")
        self.gu.submit_change_note(change_id, "current", "Verified", "2")
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.assertTrue(self.gu.submit_patch(change_id, "current"))

        # Check issue status (Gerrit hook updates the issue to the expected status)
        attempt = 0
        while True:
            if self.rm.test_issue_status(issue_id, status):
                break
            if attempt > 10:
                break
            time.sleep(1)
            attempt += 1
        self.assertTrue(self.rm.test_issue_status(issue_id, status))

    def test_gerrit_hook(self):
        """test various commit messages triggering a hook"""
        for template, final_status in TEST_MSGS:
            self._test_update_issue_hooks(template, final_status)
class TestLogExportedInElasticSearch(Base):
    """ Functional tests to verify job logs are exported in ElasticSearch
    """
    def setUp(self):
        super(TestLogExportedInElasticSearch, self).setUp()
        self.un = config.ADMIN_USER
        self.priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         self.priv_key_path,
                                         config.USERS[self.un]['email'])

    def run_ssh_cmd(self, sshkey_priv_path, user, host, subcmd):
        host = '%s@%s' % (user, host)
        sshcmd = ['ssh', '-o', 'LogLevel=ERROR',
                  '-o', 'StrictHostKeyChecking=no',
                  '-o', 'UserKnownHostsFile=/dev/null', '-i',
                  sshkey_priv_path, host]
        cmd = sshcmd + subcmd

        p = Popen(cmd, stdout=PIPE)
        return p.communicate(), p.returncode

    def push_request_script(self, index, newhash):
        newhash = newhash.rstrip()
        content = """
curl -s -XPOST 'http://elasticsearch.%s:9200/%s/_search?pretty&size=1' -d '{
      "query": {
          "bool": {
              "must": [
                  { "match": { "build_name": "config-update" } },
                  { "match": { "build_newrev": "%s" } }
              ]
          }
      }
}'
"""
        with open('/tmp/test_request.sh', 'w') as fd:
            fd.write(content % (config.GATEWAY_HOST, index, newhash))
        cmd = ['scp', '/tmp/test_request.sh',
               'root@%s:/tmp/test_request.sh' % config.GATEWAY_HOST]
        p = Popen(cmd, stdout=PIPE)
        return p.communicate(), p.returncode

    def find_index(self):
        subcmd = "curl -s -XGET http://elasticsearch.%s:9200/_cat/indices" % (
            config.GATEWAY_HOST)
        subcmd = shlex.split(subcmd)
        # A logstash index is created per day
        today_str = datetime.datetime.utcnow().strftime('%Y.%m.%d')
        # Here we fetch the index name, and also wait up to 5 minutes
        # for it to appear in ElasticSearch
        index = []
        for retry in xrange(300):
            try:
                out = self.run_ssh_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                                       config.GATEWAY_HOST, subcmd)
                outlines = out[0][0].split('\n')
                outlines.pop()
                index = [o for o in outlines if
                         o.split()[2].startswith('logstash-%s' % today_str)]
                if len(index):
                    break
            except:
                time.sleep(1)
        self.assertEqual(
            len(index),
            1,
            "No logstash index has been found for today logstash-%s (%s)" % (
                today_str, str(index)))
        index = index[0].split()[2]
        return index

    def verify_logs_exported(self):
        subcmd = "bash /tmp/test_request.sh"
        subcmd = shlex.split(subcmd)
        for retry in xrange(300):
            out = self.run_ssh_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                                   config.GATEWAY_HOST, subcmd)
            ret = json.loads(out[0][0])
            if len(ret['hits']['hits']) >= 1:
                break
            elif len(ret['hits']['hits']) == 0:
                time.sleep(1)
        self.assertEqual(len(ret['hits']['hits']),
                         1,
                         "Fail to find our log in ElasticSeach")
        return ret['hits']['hits'][0]

    def direct_push_in_config_repo(self, url, pname='config'):
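        # Clone the repo, add a randomly named file, push it directly to
        # master and return the new HEAD sha so the exported job log can
        # later be matched against it.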
        rand_str = ''.join(random.choice(
            string.ascii_uppercase + string.digits) for _ in range(5))
        clone = self.gitu_admin.clone(url, pname)
        with open('%s/test_%s' % (clone, rand_str), 'w') as fd:
            fd.write('test')
        self.gitu_admin.add_commit_in_branch(
            clone, 'master', ['test_%s' % rand_str])
        head = file('%s/.git/refs/heads/master' % clone).read()
        self.gitu_admin.direct_push_branch(clone, 'master')
        return head

    @skipIfServiceMissing('elasticsearch')
    def test_log_indexation(self):
        """ Test job log are exported in Elasticsearch
        """
        head = self.direct_push_in_config_repo(
            'ssh://admin@%s:29418/config' % (
                config.GATEWAY_HOST))
        index = self.find_index()
        self.push_request_script(index, head)
        log = self.verify_logs_exported()
        self.assertEqual(log['_source']["build_name"], "config-update")
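
    # A hypothetical helper, shown only as a sketch: it assumes the test
    # runner can reach elasticsearch.<GATEWAY_HOST> on port 9200 directly
    # (the test above avoids that assumption by scp'ing a curl script to the
    # gateway host and running it there over ssh). Same bool/must query on
    # build_name and build_newrev.
    def query_index_directly(self, index, newhash):
        import requests  # assumed available to the test runner
        query = {"query": {"bool": {"must": [
            {"match": {"build_name": "config-update"}},
            {"match": {"build_newrev": newhash.rstrip()}}]}}}
        url = 'http://elasticsearch.%s:9200/%s/_search' % (
            config.GATEWAY_HOST, index)
        return requests.post(url, json=query, params={'size': 1}).json()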


class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_project(self, name):
        print " Creating project %s ..." % name
        self.msu.createProject(name, config.ADMIN_USER)

    def push_files_in_project(self, name, files):
        print " Add files(%s) in a commit ..." % ",".join(files)
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(self.url, name, config_review=False)
        self.clone_dir = clone_dir
        for f in files:
            file(os.path.join(clone_dir, f), 'w').write('data')
            self.ggu.git_add(clone_dir, (f,))
        self.ggu.add_commit_for_all_new_additions(clone_dir)
        self.ggu.direct_push_branch(clone_dir, 'master')

    def create_issues_on_project(self, name, issues):
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            if is_present('SFRedmine'):
                issue = self.rm.create_issue(name, i['name'])
            else:
                issue = random.randint(1,100)
            yield issue, i['review']

    def create_jenkins_jobs(self, name, jobnames):
        print " Create Jenkins jobs(%s)  ..." % ",".join(jobnames)
        for jobname in jobnames:
            self.ju.create_job("%s_%s" % (name, jobname))

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass


    def simple_login(self, user):
        """log as user to make the user listable"""
        get_cookie(user, config.USERS[user]['password'])


    def create_review(self, project, issue):
        """Very basic review creator for statistics and restore tests
        purposes."""
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir,
                                      'branch_' + issue,
                                      commit='test\n\nBug: %s' % issue)
        self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue)

    def create_local_user(self, username, password, email):
        self.msu.create_user(username, password, email)

    def command(self, cmd):
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"),
                           "root",
                           config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def provision(self):
        for cmd in self.resources['commands']:
            print "Execute command %s" % cmd['cmd']
            print self.command(cmd['cmd'])
        checksum_list = {}
        for checksum in self.resources['checksum']:
            print "Compute checksum for file %s" % checksum['file']
            checksum_list[checksum['file']] = self.compute_checksum(
                checksum['file'])
        yaml.dump(checksum_list, file('/tmp/pc_checksums.yaml', 'w'))
        for user in self.resources['local_users']:
            print "Create local user %s" % user['username']
            self.create_local_user(user['username'],
                                   user['password'],
                                   user['email'])
        for u in self.resources['users']:
            print "log in as %s" % u['name']
            self.simple_login(u['name'])
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(project['name'],
                                                           project['issues']):
                if review:
                    print "Create review for bug %i in %s" % (i,
                                                              project['name'])
                    self.create_review(project['name'], str(i))
            self.create_jenkins_jobs(project['name'],
                                     [j['name'] for j in project['jobnames']])
        self.create_pads(2)
        self.create_pasties(2)

    def test_replication(self):
        """ Test gerrit replication for review process
        """
        # Be sure the project, mirror repo, project in config don't exist
        self.deleteMirrorRepo(self.pname)
        self.deleteConfigSection(self.un, self.pname)
        self.msu.deleteProject(self.pname, self.un)

        # Create the project
        self.create_project(self.pname, self.un)

        # Create new section for this project in replication.config
        self.createConfigSection(self.un, self.pname)

        # Force gerrit to read its known_hosts file. The only
        # way to do that is by restarting gerrit. The Puppet Gerrit
        # manifest will restart gerrit if a new entry in known_hosts_gerrit
        # is discovered.
        # This may take some time (gerrit in some condition take long
        # to be fully up)
        call("ssh [email protected] systemctl restart gerrit", shell=True)
        call("ssh [email protected] /root/wait4gerrit.sh", shell=True)

        # Clone the project and submit it for review
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        gitu = GerritGitUtils(self.un,
                              priv_key_path,
                              config.USERS[self.un]['email'])
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST,
                                        self.pname)
        clone_dir = gitu.clone(url, self.pname)

        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        # Add 2 files and resubmit for review
        data = "echo Working"
        us_files = ["run_functional-tests.sh", "run_tests.sh"]

        for f in us_files:
            file(os.path.join(clone_dir, f), 'w').write(data)
            os.chmod(os.path.join(clone_dir, f), 0755)

        gitu.add_commit_and_publish(clone_dir, "master", None, fnames=us_files)

        # Review the patch and merge it
        change_ids = self.gu.get_my_changes_for_project(self.pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        self.gu.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu.submit_change_note(change_id, "current", "Verified", "2")
        self.gu.submit_change_note(change_id, "current", "Workflow", "1")
        # Put USER_2 as core for config project
        grp_name = '%s-core' % self.pname
        self.gu.add_group_member(config.USER_2, grp_name)
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.assertTrue(self.gu.submit_patch(change_id, "current"))
        shutil.rmtree(clone_dir)

        # Verify if gerrit automatically triggered replication
        # Mirror repo(in mysql node) should have these latest changes
        # Clone the mirror repo(from mysql) and check for the 2 files
        msql_repo_url = self.msql_repo_path + '%s.git' % self.pname
        self.mirror_clone_and_check_files(msql_repo_url, self.pname, us_files)


class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        with open('resources.yaml', 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.REDMINE_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_project(self, name):
        print " Creating project %s ..." % name
        self.msu.createProject(name, config.ADMIN_USER)

    def push_files_in_project(self, name, files):
        print " Add files(%s) in a commit ..." % ",".join(files)
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(self.url, name, config_review=False)
        self.clone_dir = clone_dir
        for f in files:
            file(os.path.join(clone_dir, f), 'w').write('data')
            self.ggu.git_add(clone_dir, (f,))
        self.ggu.add_commit_for_all_new_additions(clone_dir)
        self.ggu.direct_push_branch(clone_dir, 'master')

    def create_issues_on_project(self, name, issues):
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            issue = self.rm.create_issue(name, i['name'])
            yield issue, i['review']

    def create_jenkins_jobs(self, name, jobnames):
        print " Create Jenkins jobs(%s)  ..." % ",".join(jobnames)
        for jobname in jobnames:
            self.ju.create_job("%s_%s" % (name, jobname))

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def create_review(self, project, issue):
        """Very basic review creator for statistics and restore tests
        purposes."""
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir,
                                      'branch_' + issue,
                                      commit='test\n\nBug: %s' % issue)
        self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue)

    def provision(self):
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(project['name'],
                                                           project['issues']):
                if review:
                    print "Create review for bug %i in %s" % (i,
                                                              project['name'])
                    self.create_review(project['name'], str(i))
            self.create_jenkins_jobs(project['name'],
                                     [j['name'] for j in project['jobnames']])
        self.create_pads(2)
        self.create_pasties(2)

    def test_check_add_automatic_reviewers(self):
        """ Test if reviewers-by-blame plugin works
        """
        pname = 'p_%s' % create_random_str()
        u2mail = config.USERS[config.USER_2]['email']
        options = {'core-group': u2mail}
        self.create_project(pname, options)
        first_u = config.ADMIN_USER
        gu_first_u = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[first_u]['auth_cookie'])
        self.assertTrue(gu_first_u.project_exists(pname))
        # Push data in the created project as admin user
        k1_index = gu_first_u.add_pubkey(config.USERS[first_u]["pubkey"])
        priv_key_path = set_private_key(config.USERS[first_u]["privkey"])
        gitu = GerritGitUtils(first_u,
                              priv_key_path,
                              config.USERS[first_u]['email'])
        url = "ssh://%s@%s:29418/%s" % (first_u, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        data = ['this', 'is', 'a', 'couple', 'of', 'lines']
        file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data))
        gitu.add_commit_and_publish(clone_dir, "master", "Test commit",
                                    fnames=["file"])
        # Get the change id
        change_ids = gu_first_u.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]
        # Merge the change
        gu_first_u.submit_change_note(change_id, "current", "Code-Review", "2")
        gu_first_u.submit_change_note(change_id, "current", "Verified", "2")
        gu_first_u.submit_change_note(change_id, "current", "Workflow", "1")
        second_u = config.USER_2
        gu_second_u = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[second_u]['auth_cookie'])
        self.assertTrue(gu_first_u.submit_patch(change_id, "current"))
        # Change the file we have committed as admin user
        k2_index = gu_second_u.add_pubkey(config.USERS[second_u]["pubkey"])
        priv_key_path = set_private_key(config.USERS[second_u]["privkey"])
        gitu = GerritGitUtils(second_u,
                              priv_key_path,
                              config.USERS[second_u]['email'])
        url = "ssh://%s@%s:29418/%s" % (second_u, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        data = ['this', 'is', 'some', 'lines']
        file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data))
        gitu.add_commit_and_publish(clone_dir, "master", "Test commit",
                                    fnames=["file"])
        # Get the change id
        change_ids = gu_second_u.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]
        # Verify first_u has been automatically added to reviewers
        attempts = 0
        while True:
            if len(gu_second_u.get_reviewers(change_id)) > 0 or attempts >= 3:
                break
            attempts += 1
            time.sleep(1)
        reviewers = gu_second_u.get_reviewers(change_id)
        self.assertGreaterEqual(len(reviewers), 1)
        self.assertTrue(first_u in reviewers)

        gu_first_u.del_pubkey(k1_index)
        gu_second_u.del_pubkey(k2_index)


class TestRepoxplorer(Base):
    def setUp(self):
        super(TestRepoxplorer, self).setUp()
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        self.gitu_admin = GerritGitUtils(
            config.ADMIN_USER, priv_key_path,
            config.USERS[config.ADMIN_USER]['email'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ju = JenkinsUtils()

        self.dirs_to_delete = []

    def tearDown(self):
        super(TestRepoxplorer, self).tearDown()
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def clone_as_admin(self, pname):
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST,
                                        pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def set_resources_then_direct_push(self,
                                       fpath,
                                       resources=None,
                                       mode='add'):
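        # Add (or delete) a resources file in the config repo, push it
        # directly to master and wait for the triggered config-update job
        # to apply it.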
        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            file(path, 'w').write(resources)
        elif mode == 'del':
            os.unlink(path)
        change_sha = self.commit_direct_push_as_admin(
            config_clone_dir, "Add new resources for functional tests")
        config_update_log = self.ju.wait_for_config_update(change_sha)
        self.assertIn("SUCCESS", config_update_log)

    def get_projects(self):
        url = config.GATEWAY_URL + "/repoxplorer/projects.json/"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        return resp.json()

    def get_groups(self):
        url = config.GATEWAY_URL + "/repoxplorer/api_groups.json/"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        return resp.json()

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_accessible(self):
        """ Test if RepoXplorer is accessible on gateway hosts
        """
        url = config.GATEWAY_URL + "/repoxplorer/"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('[RepoXplorer] - Projects listing' in resp.text)

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_data_indexed(self):
        """ Test if RepoXplorer has indexed the config repository
        """
        url = config.GATEWAY_URL + "/repoxplorer/commits.json?pid=internal"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(resp.json()[2] > 0)

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_displayed_top_menu(self):
        """ Test if RepoXplorer link is displayed in the top menu
        """
        url = config.GATEWAY_URL + "/topmenu.html"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('href="/repoxplorer/"' in resp.text,
                        'repoxplorer not present as a link')

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_config_from_resources(self):
        """ Test if RepoXPlorer is reconfigured from new resources
        """
        fpath = "resources/%s.yaml" % create_random_str()
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      source-repositories:
        - %(pname)s/%(rname)s
  repos:
    %(pname)s/%(rname)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
  groups:
    %(gname)s:
      description: test for functional test
      members:
        - [email protected]
"""
        tmpl_keys = {
            'pname': create_random_str(),
            'rname': create_random_str(),
            'gname': create_random_str()
        }

        resources = resources % tmpl_keys
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        projects = self.get_projects()
        groups = self.get_groups()

        self.assertIn(tmpl_keys['gname'], groups.keys())
        self.assertIn(tmpl_keys['pname'], projects['projects'].keys())
        project_repos = [
            r['name'] for r in projects['projects'][tmpl_keys['pname']]
        ]
        self.assertIn(tmpl_keys['pname'] + '/' + tmpl_keys['rname'],
                      project_repos)

        self.set_resources_then_direct_push(fpath, mode='del')

        projects = self.get_projects()
        groups = self.get_groups()

        self.assertNotIn(tmpl_keys['gname'], groups.keys())
        self.assertNotIn(tmpl_keys['pname'], projects['projects'].keys())


class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ru = ResourcesUtils()
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.stb_client = SFStoryboard(
            config.GATEWAY_URL + "/storyboard_api",
            config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_resources(self):
        print " Creating resources ..."
        if cmp_version(os.environ.get("PROVISIONED_VERSION", "0.0"), "2.4.0"):
            # Remove review-dashboard
            for p in self.resources['resources']['projects'].values():
                del p['review-dashboard']
        self.ru.create_resources("provisioner",
                                 {'resources': self.resources['resources']})
        # Create review for the first few repositories
        for project in self.resources['resources']['repos'].keys()[:3]:
            self.clone_project(project)
            self.create_review(project, "Test review for %s" % project)

    def create_project(self, name):
        print " Creating project %s ..." % name
        self.ru.create_repo(name)

    def clone_project(self, name):
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        self.clone_dir = self.ggu.clone(self.url, name, config_review=False)

    def push_files_in_project(self, name, files):
        print " Add files(%s) in a commit ..." % ",".join(files)
        self.clone_project(name)
        for f in files:
            file(os.path.join(self.clone_dir, f), 'w').write('data')
            self.ggu.git_add(self.clone_dir, (f, ))
        self.ggu.add_commit_for_all_new_additions(self.clone_dir)
        self.ggu.direct_push_branch(self.clone_dir, 'master')

    def create_storyboard_issue(self, name, issue_name):
        project = self.stb_client.projects.get(name)
        story = self.stb_client.stories.create(title=issue_name)
        task = self.stb_client.tasks.create(story_id=story.id,
                                            project_id=project.id,
                                            title=issue_name)
        return task.id, story.id

    def create_issues_on_project(self, name, issues):
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            if is_present('storyboard'):
                issue = self.create_storyboard_issue(name, i['name'])
            else:
                issue = (random.randint(1, 100), random.randint(1, 100))
            yield issue, i['review']

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def simple_login(self, user, password):
        """log as user to make the user listable"""
        get_cookie(user, password)

    def create_review(self, project, commit_message, branch='master'):
        """Very basic review creator for statistics and restore tests
        purposes."""
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir,
                                      branch,
                                      commit=commit_message)
        self.ggu.review_push_branch(self.clone_dir, branch)

    def create_review_for_issue(self, project, issue):
        self.create_review(
            project, 'test\n\nTask: #%s\nStory: #%s' % (issue[0], issue[1]),
            'branch_%s' % str(issue[0]))

    def create_local_user(self, username, password, email):
        self.msu.create_user(username, password, email)

    def command(self, cmd):
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"), "root",
                           config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def read_file(self, f):
        return self.command("cat %s" % f)[0]

    def provision(self):
        for cmd in self.resources['commands']:
            print "Execute command %s" % cmd['cmd']
            print self.command(cmd['cmd'])
        checksum_list = {}
        for checksum in self.resources['checksum']:
            print "Compute checksum for file %s" % checksum['file']
            checksum_list[checksum['file']] = self.compute_checksum(
                checksum['file'])
            checksum_list['content_' + checksum['file']] = self.read_file(
                checksum['file'])
        yaml.dump(checksum_list,
                  file('pc_checksums.yaml', 'w'),
                  default_flow_style=False)
        for user in self.resources['local_users']:
            print "Create local user %s" % user['username']
            self.create_local_user(user['username'], user['password'],
                                   user['email'])
            self.simple_login(user['username'], user['password'])
        for u in self.resources['users']:
            print "log in as %s" % u['name']
            self.simple_login(u['name'], config.USERS[u['name']]['password'])
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(
                    project['name'], project['issues']):
                if review:
                    print "Create review for bug %s in %s" % (i,
                                                              project['name'])
                    self.create_review_for_issue(project['name'], i)
        self.create_resources()
        self.create_pads(2)
        self.create_pasties(2)
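
# A minimal driver sketch, assuming this provisioner is executed as a
# standalone script (pwd, config and the helper utils being module-level
# imports, as elsewhere in these examples):
if __name__ == '__main__':
    SFProvisioner().provision()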


class TestProjectTestsWorkflow(Base):
    """ Functional tests to verify the configuration of a project test
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)
        cls.sample_project_dir = \
            os.path.join(config.SF_TESTS_DIR, "sample_project/")

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.projects = []
        self.dirs_to_delete = []
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])
        # Clone the config repo and make change to it
        # in order to test the new sample_project
        self.config_clone_dir = self.clone_as_admin("config")
        self.original_layout = file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml")).read()
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        self.original_project = file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read()
        # Put USER_2 as core for config project
        self.gu.add_group_member(config.USER_2, "config-core")

    def tearDown(self):
        self.restore_config_repo(self.original_layout,
                                 self.original_project,
                                 self.original_zuul_projects)
        for name in self.projects:
            self.msu.deleteProject(name,
                                   config.ADMIN_USER)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def assert_reviewer_approvals(self, change_id, value):
        approvals = {}
        for _ in range(90):
            approvals = self.gu.get_reviewer_approvals(change_id,
                                                       'jenkins')
            if approvals and approvals.get('Verified') == value:
                break
            time.sleep(1)
        self.assertEqual(value, approvals.get('Verified'))

    def clone_as_admin(self, pname):
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST,
                                        pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, layout, project, zuul):
        file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml"), 'w').write(
            layout)
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            project)
        self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore layout.yaml and projects.yaml")

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def push_review_as_admin(self, clone_dir, msg):
        # Stage, commit and push the additions for review on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.review_push_branch(clone_dir, 'master')

    def create_project(self, name, user,
                       options=None):
        self.msu.createProject(name, user,
                               options)
        self.projects.append(name)

    def test_check_project_test_workflow(self):
        """ Validate new project to test via zuul layout.yaml
        """
        # We want to create a project and provide project source
        # code with tests. We then configure zuul/jjb to run the
        # test cases, and finally validate that Gerrit has been
        # updated with the test results.
        # We use the sample-project (that already exists)

        pname = 'test_workflow_%s' % create_random_str()
        # Be sure the project does not exist
        self.msu.deleteProject(pname,
                               config.ADMIN_USER)
        # Create it
        self.create_project(pname, config.ADMIN_USER)

        # Add the sample-project to the empty repository
        clone_dir = self.clone_as_admin(pname)
        copytree(self.sample_project_dir, clone_dir)
        self.commit_direct_push_as_admin(clone_dir, "Add the sample project")

        # Modify zuul/projects.yaml and jobs/projects.yaml in the
        # config repo in order to test the new project
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("zuul-demo", pname),
        )
        ycontent2 = load(file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read())
        sp2 = copy.deepcopy(
            [p for p in ycontent2 if 'project' in p and
                p['project']['name'] == 'zuul-demo'][0])
        sp2['project']['name'] = pname
        ycontent2.append(sp2)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            dump(ycontent2))

        # Retrieve the previous build number for config-check
        last_success_build_num_ch = \
            self.ju.get_last_build_number("config-check",
                                          "lastSuccessfulBuild")
        # Retrieve the previous build number for config-update
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")

        # Send review (config-check) will be triggered
        self.push_review_as_admin(
            self.config_clone_dir,
            "Add config definition in Zuul/JJB config for %s" % pname)

        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")

        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1

        self.assertEqual(last_build_num_ch, last_success_build_num_ch)
        # Give Zuul some time to report the test result to Gerrit.
        time.sleep(2)

        # Get the change id
        change_ids = self.gu.get_my_changes_for_project("config")
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]

        # Check whether zuul sets verified to +1 after running the tests
        # Give Zuul some time to report the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+1')

        # review the change
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu2.submit_change_note(change_id, "current", "Workflow", "1")

        # now zuul processes gate pipeline and runs config-check job
        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")

        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1

        self.assertEqual(last_build_num_ch, last_success_build_num_ch)

        # Check whether zuul sets verified to +2 after running the tests
        # Give Zuul some time to report the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+2')

        # verify whether zuul merged the patch
        change = self.gu.get_change('config', 'master', change_id)
        change_status = change['status']
        attempt = 0
        while change_status != 'MERGED':
            if attempt >= 90:
                break
            time.sleep(1)
            change = self.gu.get_change('config', 'master', change_id)
            change_status = change['status']
            attempt += 1
        self.assertEqual(change_status, 'MERGED')

        # Test the post pipeline
        # as the patch is merged, the post pipeline should run the
        # config-update job
        # Wait for config-update to finish and verify the success
        self.ju.wait_till_job_completes("config-update",
                                        last_success_build_num_cu,
                                        "lastSuccessfulBuild")
        last_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastBuild")
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_cu, last_success_build_num_cu)

        # Retrieve the prev build number for pname-unit-tests
        # Retrieve the prev build number for pname-functional-tests
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        # Test config-update
        # config-update should have created jobs for pname
        # Trigger tests on pname
        # Send a review and check the tests have been run
        self.gitu_admin.add_commit_and_publish(
            clone_dir, 'master', "Add useless file",
            self.un)
        # Wait for pname-unit-tests to finish and verify the success
        self.ju.wait_till_job_completes("%s-unit-tests" % pname,
                                        last_success_build_num_sp_ut,
                                        "lastSuccessfulBuild")
        # Wait for pname-functional-tests to end and check the success
        self.ju.wait_till_job_completes("%s-functional-tests" % pname,
                                        last_success_build_num_sp_ft,
                                        "lastSuccessfulBuild")
        # Check the unit tests succeed
        last_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ut, last_success_build_num_sp_ut)
        # Check the functional tests succeed
        last_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ft, last_success_build_num_sp_ft)

        # Get the change id
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]

        # Give Zuul some time to report the test result to Gerrit.
        for i in range(90):
            if "jenkins" in self.gu.get_reviewers(change_id):
                break
            time.sleep(1)

        self.assert_reviewer_approvals(change_id, '+1')


class SFchecker:
    """ This checker is only intended for testin
    SF backup/restore and update. It checks that the user
    data defined in resourses.yaml are present on the SF.

    Those data must have been provisioned by SFProvisioner.
    """
    def __init__(self):
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.gu = GerritUtils(
            'http://%s/' % config.GATEWAY_HOST,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def check_project(self, name):
        print " Check project %s exists ..." % name,
        if not self.gu.project_exists(name) or \
           (is_present('SFRedmine') and not self.rm.project_exists(name)):
            print "FAIL"
            exit(1)
        print "OK"

    def check_files_in_project(self, name, files):
        print " Check files(%s) exist in project ..." % ",".join(files),
        # TODO(fbo); use gateway host instead of gerrit host
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(url, name, config_review=False)
        for f in files:
            if not os.path.isfile(os.path.join(clone_dir, f)):
                print "FAIL"
                exit(1)
        print "OK"

    def check_issues_on_project(self, name, issues):
        print " Check that at least %s issues exists for that project ...," %\
            len(issues)
        current_issues = self.rm.get_issues_by_project(name)
        if len(current_issues) < len(issues):
            print "FAIL: expected %s, project has %s" % (
                len(issues), len(current_issues))
            exit(1)
        print "OK"

    def check_jenkins_jobs(self, name, jobnames):
        print " Check that jenkins jobs(%s) exists ..." % ",".join(jobnames),
        for jobname in jobnames:
            if not '%s_%s' % (name, jobname) in self.ju.list_jobs():
                print "FAIL"
                exit(1)
        print "OK"

    def check_reviews_on_project(self, name, issues):
        reviews = [i for i in issues if i['review']]
        print " Check that at least %s reviews exists for that project ..." %\
            len(reviews),
        pending_reviews = self.ggu.list_open_reviews(name, config.GATEWAY_HOST)
        if not len(pending_reviews) >= len(reviews):
            print "FAIL"
            exit(1)
        print "OK"

    def check_pads(self, amount):
        pass

    def check_pasties(self, amount):
        pass

    def command(self, cmd):
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"),
                           "root",
                           config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def read_file(self, f):
        return self.command("cat %s" % f)[0]

    def simple_login(self, user, password):
        """log as user"""
        return get_cookie(user, password)

    def check_users_list(self):
        print "Check that users are listable ...",
        users = [u['name'] for u in self.resources['users']]
        c = {'auth_pubtkt': config.USERS[config.ADMIN_USER]['auth_cookie']}
        url = 'http://%s/manage/project/membership/' % config.GATEWAY_HOST
        registered = requests.get(url,
                                  cookies=c).json()
        # usernames are in first position
        r_users = [u[0] for u in registered]
        if not set(users).issubset(set(r_users)):
            print "FAIL"
            exit(1)
        print "OK"

    def check_checksums(self):
        print "Check that expected file are there"
        checksum_list = yaml.load(file('/tmp/pc_checksums.yaml'))
        mismatch = False
        for f, checksum in checksum_list.items():
            c = self.compute_checksum(f)
            if c == checksum:
                print "Expected checksum (%s) for %s is OK." % (
                    checksum, f)
            else:
                print "Expected checksum (%s) for %s is WRONG (%s)." % (
                    checksum, f, c)
                print "New file is:"
                print "    %s" % self.read_file(f).replace("\n", "\n    ")
                mismatch = True
        if "checksum_warn_only" not in sys.argv and mismatch:
            sys.exit(1)

    def checker(self):
        self.check_checksums()
        self.check_users_list()
        for project in self.resources['projects']:
            print "Check user datas for %s" % project['name']
            self.check_project(project['name'])
            self.check_files_in_project(project['name'],
                                        [f['name'] for f in project['files']])
            if is_present('SFRedmine'):
                self.check_issues_on_project(project['name'],
                                             project['issues'])
            self.check_reviews_on_project(project['name'], project['issues'])
            self.check_jenkins_jobs(project['name'],
                                    [j['name'] for j in project['jobnames']])
        self.check_pads(2)
        self.check_pasties(2)
        for user in self.resources['local_users']:
            print "Check user %s can log in ..." % user['username'],
            if self.simple_login(user['username'],
                                 user['password']):
                print "OK"
            else:
                print "FAIL"
                exit(1)
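
# A minimal driver sketch, assuming this checker is run as a standalone
# script after the provisioner; passing "checksum_warn_only" on the command
# line keeps checksum mismatches as warnings (see check_checksums above):
if __name__ == '__main__':
    SFchecker().checker()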