def tearDown(self):
        """Clean up after a test: remove the conf file, shut the cluster down, and verify clean exits."""
        # Clean up files created during this test
        with suppress(FileNotFoundError):
            os.remove(self.test_conf_file_path)

        # Give the cluster a bit of extra time to finish working (before forcefully killing it and failing the test)
        with suppress(TestClusterTimeoutError):
            self.cluster.block_until_build_queue_empty(timeout=5)

        # Kill processes and make sure all processes exited with 0 exit code
        services = self.cluster.kill()

        # only check the exit code if not on Windows as Popen.terminate kills the process on Windows and the exit
        # code is not zero.
        # TODO: remove the is_windows() check after we can handle exit on Windows gracefully.
        if not is_windows():
            for service in services:
                self.assertEqual(
                    service.return_code,
                    0,
                    'Service running on url: {} should exit with code 0, but exited with code {}.'.format(
                        service.url,
                        service.return_code,
                    ),
                )
        # Remove the temp dir. This will delete the log files, so should be run after cluster shuts down.
        self.test_app_base_dir.cleanup()
    def tearDown(self):
        # Give the cluster a bit of extra time to finish working (before forcefully killing it and failing the test)
        with suppress(TestClusterTimeoutError):
            self.cluster.block_until_build_queue_empty(timeout=5)

        # Kill processes and make sure all processes exited with 0 exit code
        services = self.cluster.kill()

        # only check the exit code if not on Windows as Popen.terminate kills the process on Windows and the exit
        # code is not zero.
        # TODO: remove the is_windows() check after we can handle exit on Windows gracefully.
        if not is_windows():
            for service in services:
                self.assertEqual(
                    service.return_code,
                    0,
                    'Service running on url: {} should exit with code 0, but exited with code {}.'
                    .format(
                        service.url,
                        service.return_code,
                    ),
                )
        # Remove the temp dir. This will delete the log files, so should be run after cluster shuts down.
        self.cluster.master_app_base_dir.cleanup()
        [
            slave_app_base_dir.cleanup()
            for slave_app_base_dir in self.cluster.slaves_app_base_dirs
        ]
Example #3
0
 def read_config_from_disk(self):
     """
     Parse an INI-style config file from disk.

     :return: the parsed configuration
     :rtype: ConfigObj
     :raises FileNotFoundError: if the configured file path does not exist
     :raises PermissionError: if (on non-Windows) the file mode is not CONFIG_FILE_MODE
     """
     if not os.path.isfile(self._filename):
         raise FileNotFoundError('Conf file {} does not exist'.format(self._filename))
     file_mode = stat.S_IMODE(os.stat(self._filename).st_mode)
     # Permission check is skipped on Windows, where POSIX mode bits are not meaningful.
     if not is_windows() and file_mode != self.CONFIG_FILE_MODE:
         raise PermissionError('The conf file {} has incorrect permissions, '
                               'should be 0600 for security reasons'.format(self._filename))
     config_parsed = ConfigObj(self._filename)
     return config_parsed
Example #4
0
 def read_config_from_disk(self):
     """
     Parse an INI-style config file from disk.

     Validates that the file exists and (outside Windows) that its mode
     matches CONFIG_FILE_MODE before handing it to the INI parser.
     """
     conf_path = self._filename
     if not os.path.isfile(conf_path):
         raise FileNotFoundError('Conf file {} does not exist'.format(
             conf_path))
     mode_bits = stat.S_IMODE(os.stat(conf_path).st_mode)
     if not is_windows() and mode_bits != self.CONFIG_FILE_MODE:
         raise PermissionError(
             'The conf file {} has incorrect permissions, '
             'should be 0600 for security reasons'.format(conf_path))
     return ConfigObj(conf_path)
Example #5
0
    'create_shared_zip':
    True,
    'include_in_shared_zip':
    True,
    'include_files': [
        ('bin/git_askpass.sh', 'bin/git_askpass.sh'),
        ('bin/git_ssh.sh', 'bin/git_ssh.sh'),
        ('conf/default_clusterrunner.conf', 'conf/default_clusterrunner.conf'),
    ],
    'optimize':
    1,  # This should not be set to 2 because that removes docstrings needed for command line help.
}

# NOTE(review): the Executable(..., targetName=...) API below looks like cx_Freeze — confirm.
base = 'Console'

# Give the frozen binary a platform-appropriate name.
executable_name = 'clusterrunner.exe' if is_windows() else 'clusterrunner'
executables = [Executable('main.py', base=base, targetName=executable_name)]

if sys.platform.startswith('linux'):
    # Fixes compatibility between rhel and ubuntu
    bin_includes = ['/usr/lib64/libssl.so.10', '/usr/lib64/libcrypto.so.10']
    file_exists = [os.path.isfile(filename) for filename in bin_includes]

    # Only bundle the shared libraries when every one of them exists on the build host.
    if all(file_exists):
        buildOptions['bin_includes'] = bin_includes

# Stamp the package with the auto-derived version before building.
version = autoversioning.get_version()
autoversioning.write_package_version_file(version)

setup(name='ClusterRunner',
      version=version,
Example #6
0
import subprocess
import tempfile
import time
from unittest import skipIf
import yaml

from app.master.build_fsm import BuildState
from app.util import poll
from app.util.process_utils import is_windows
from test.framework.functional.base_functional_test_case import BaseFunctionalTestCase
from test.functional.job_configs import JOB_WITH_SLEEPS_90SEC, JOB_WITH_SLEEPING_ATOMIZER_90SEC

# File where the sleeping atomizer records its PID — presumably read by the
# cancellation tests to find/kill that process; TODO confirm against the job configs.
ATOMIZER_PID_FILE = '/tmp/atomizer_pid.txt'


@skipIf(is_windows(), 'Fails on AppVeyor; see issue #345')
class TestBuildCancellation(BaseFunctionalTestCase):
    def test_build_cancellation_while_building(self):
        master = self.cluster.start_master()
        # Only one slave, with one executor. This means that the slave should be able to
        # theoretically finish the build in 90 seconds, as this job definition has 90 atoms,
        # with each sleeping for 1 second.
        slaves = self.cluster.start_slaves(1,
                                           num_executors_per_slave=1,
                                           start_port=43001)
        project_dir = tempfile.TemporaryDirectory()
        build_resp = master.post_new_build({
            'type':
            'directory',
            'config':
            yaml.safe_load(
Example #7
0
    'build_exe': 'dist',
    'compressed': True,
    'copy_dependent_files': True,
    'create_shared_zip': True,
    'include_in_shared_zip': True,
    'include_files': [
        ('bin/git_askpass.sh', 'bin/git_askpass.sh'),
        ('bin/git_ssh.sh', 'bin/git_ssh.sh'),
        ('conf/default_clusterrunner.conf', 'conf/default_clusterrunner.conf'),
    ],
    'optimize': 1,  # This should not be set to 2 because that removes docstrings needed for command line help.
}

# NOTE(review): the Executable(..., targetName=...) API below looks like cx_Freeze — confirm.
base = 'Console'

# Give the frozen binary a platform-appropriate name.
executable_name = 'clusterrunner.exe' if is_windows() else 'clusterrunner'
executables = [
    Executable('main.py', base=base, targetName=executable_name)
]

if sys.platform.startswith('linux'):
    # Fixes compatibility between rhel and ubuntu
    bin_includes = ['/usr/lib64/libssl.so.10', '/usr/lib64/libcrypto.so.10']
    file_exists = [os.path.isfile(filename) for filename in bin_includes]

    # Only bundle the shared libraries when every one of them exists on the build host.
    if all(file_exists):
        buildOptions['bin_includes'] = bin_includes

# Stamp the package with the auto-derived version before building.
version = autoversioning.get_version()
autoversioning.write_package_version_file(version)
Example #8
0
class TestGit(BaseUnitTestCase):
    """Unit tests for the Git project type: path mapping, command execution, and fetch logic.

    setUp stubs out all filesystem side effects so no test touches the real disk,
    and _patch_popen injects fake subprocess results keyed by command regex.
    """

    def setUp(self):
        super().setUp()
        # Stub filesystem mutations so tests never create dirs or links on disk.
        self.patch('app.project_type.git.fs.create_dir')
        self.patch('os.unlink')
        self.patch('os.symlink')

        self.os_path_exists_mock = self.patch('app.project_type.git.os.path.exists')
        self.os_path_exists_mock.return_value = False
        self.os_path_isfile_mock = self.patch('app.project_type.git.os.path.isfile')
        self.os_path_isfile_mock.return_value = False

    def test_timing_file_path_happy_path(self):
        git_env = Git("ssh://scm.dev.box.net/box/www/current", 'origin', 'refs/changes/78/151978/27')
        actual_timing_file_sys_path = git_env.timing_file_path('QUnit')
        expected_timing_file_sys_path = join(
            Configuration['base_directory'],
            'timings',
            'master',
            'scm.dev.box.net',
            'box',
            'www',
            'current',
            'QUnit.timing.json',
        )
        # Fixed: assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
        # for consistency with the rest of this class.
        self.assertEqual(expected_timing_file_sys_path, actual_timing_file_sys_path)

    def test_execute_command_in_project_specifies_cwd_if_exists(self):
        self.os_path_exists_mock.return_value = True
        project_type_popen_patch = self._patch_popen()

        fake_project_directory = 'proj_dir'
        fake_command = 'some_command'
        git_env = Git("ssh://scm.dev.box.net/box/www/current", 'origin', 'refs/changes/78/151978/27')
        git_env.project_directory = fake_project_directory
        git_env.execute_command_in_project(fake_command)
        env_setter = get_environment_variable_setter_command('PROJECT_DIR', fake_project_directory)
        project_type_popen_patch.assert_called_once_with(
            '{} {}'.format(env_setter, fake_command),
            cwd=fake_project_directory,
            shell=ANY,
            stdout=ANY,
            stderr=ANY,
            start_new_session=ANY,
        )

    def test_execute_command_in_project_type_specifies_cwd_if_doesnt_exist(self):
        project_type_popen_patch = self._patch_popen()

        fake_project_directory = 'proj_dir'
        fake_command = 'some_command'
        git_env = Git("ssh://scm.dev.box.net/box/www/current", 'origin', 'refs/changes/78/151978/27')
        git_env.project_directory = fake_project_directory
        git_env.execute_command_in_project(fake_command)
        env_setter = get_environment_variable_setter_command('PROJECT_DIR', fake_project_directory)
        project_type_popen_patch.assert_called_once_with(
            '{} {}'.format(env_setter, fake_command),
            cwd=None,  # os.path.exists is mocked False in setUp, so no cwd is passed
            shell=ANY,
            stdout=ANY,
            stderr=ANY,
            start_new_session=ANY,
        )

    def test_get_full_repo_directory(self):
        Configuration['repo_directory'] = join(expanduser('~'), '.clusterrunner', 'repos')
        url = 'http://scm.example.com/path/to/project'

        actual_repo_sys_path = Git.get_full_repo_directory(url)

        expected_repo_sys_path = join(
            Configuration['repo_directory'],
            'scm.example.com',
            'path',
            'to',
            'project',
        )
        self.assertEqual(expected_repo_sys_path, actual_repo_sys_path)

    def test_get_timing_file_directory(self):
        Configuration['timings_directory'] = join(expanduser('~'), '.clusterrunner', 'timing')
        url = 'http://scm.example.com/path/to/project'

        actual_timings_sys_path = Git.get_timing_file_directory(url)

        expected_timings_sys_path = join(
            Configuration['timings_directory'],
            'scm.example.com',
            'path',
            'to',
            'project',
        )

        self.assertEqual(expected_timings_sys_path, actual_timings_sys_path)

    def test_get_repo_directory_removes_colon_from_directory_if_exists(self):
        Configuration['repo_directory'] = join(expanduser('~'), 'tmp', 'repos')
        git = Git("some_remote_value", 'origin', 'ref/to/some/branch')

        actual_repo_directory = git.get_full_repo_directory('ssh://source_control.cr.com:1234/master')
        expected_repo_directory = join(
            Configuration['repo_directory'],
            'source_control.cr.com1234',  # ':' stripped from host:port
            'master'
        )

        self.assertEqual(expected_repo_directory, actual_repo_directory)

    def test_get_timing_file_directory_removes_colon_from_directory_if_exists(self):
        Configuration['timings_directory'] = join(expanduser('~'), 'tmp', 'timings')
        git = Git("some_remote_value", 'origin', 'ref/to/some/branch')

        actual_timing_directory = git.get_timing_file_directory('ssh://source_control.cr.com:1234/master')
        expected_timing_directory = join(
            Configuration['timings_directory'],
            'source_control.cr.com1234',  # ':' stripped from host:port
            'master',
        )

        self.assertEqual(expected_timing_directory, actual_timing_directory)

    def test_fetch_project_when_existing_repo_is_shallow_deletes_repo(self):
        self.os_path_isfile_mock.return_value = True
        self.os_path_exists_mock.return_value = True
        mock_fs = self.patch('app.project_type.git.fs')
        mock_rmtree = self.patch('shutil.rmtree')

        git = Git('url')
        git._repo_directory = 'fake/repo_path'
        git._execute_and_raise_on_failure = MagicMock()
        git.execute_command_in_project = Mock(return_value=('', 0))

        mock_fs.create_dir.call_count = 0  # only measure calls made in _fetch_project
        mock_rmtree.call_count = 0

        git._fetch_project()

        mock_rmtree.assert_called_once_with('fake/repo_path')
        mock_fs.create_dir.assert_called_once_with('fake/repo_path', Git.DIRECTORY_PERMISSIONS)

    @genty_dataset(
        failed_rev_parse=(1, True),
        successful_rev_parse=(0, False),
    )
    def test_repo_is_cloned_if_and_only_if_rev_parse_fails(self, rev_parse_return_code, expect_git_clone_call):
        mock_popen = self._patch_popen({
            'git rev-parse$': _FakePopenResult(return_code=rev_parse_return_code)
        })
        Configuration['repo_directory'] = '/repo-directory'

        git = Git(url='http://original-user-specified-url.test/repo-path/repo-name')
        git.fetch_project()

        git_clone_call = call(AnyStringMatching('git clone'), start_new_session=ANY,
                              stdout=ANY, stderr=ANY, cwd=ANY, shell=ANY)
        if expect_git_clone_call:
            self.assertIn(git_clone_call, mock_popen.call_args_list, 'If "git rev-parse" returns a failing exit code, '
                                                                     '"git clone" should be called.')
        else:
            self.assertNotIn(git_clone_call, mock_popen.call_args_list, 'If "git rev-parse" returns a successful exit '
                                                                        'code, "git clone" should not be called.')

    @genty_dataset(
        strict_host_checking_is_on=(True,),
        strict_host_checking_is_off=(False,),
    )
    def test_execute_git_command_auto_sets_strict_host_option_correctly(self, strict_host_check_setting):
        Configuration['git_strict_host_key_checking'] = strict_host_check_setting
        popen_mock = self._patch_popen()

        git = Git(url='http://some-user-url.com/repo-path/repo-name')
        git._execute_git_command_in_repo_and_raise_on_failure('fakecmd')

        if strict_host_check_setting:
            expected_ssh_arg = '-o StrictHostKeyChecking=yes'
        else:
            expected_ssh_arg = '-o StrictHostKeyChecking=no'

        expected_call = call(AnyStringMatching(expected_ssh_arg),
                             start_new_session=ANY, stdout=ANY, stderr=ANY, cwd=ANY, shell=ANY)
        self.assertIn(expected_call, popen_mock.call_args_list, 'Executed git command should include the correct '
                                                                'option for StrictHostKeyChecking.')

    @skipIf(is_windows(), 'Skipping test for cloning repo from master on Windows')
    def test_slave_param_overrides_returns_expected(self):
        Configuration['get_project_from_master'] = True
        Configuration['repo_directory'] = '/repo-directory'
        self._patch_popen({
            'git rev-parse FETCH_HEAD': _FakePopenResult(stdout='deadbee123\n')
        })

        git = Git(url='http://original-user-specified-url.test/repo-path/repo-name')
        git.fetch_project()
        actual_overrides = git.slave_param_overrides()

        expected_overrides = {
            'url': 'ssh://fake_hostname/repodirectory/originaluserspecifiedurl.test/repopath/reponame',
            'branch': 'refs/clusterrunner/deadbee123',
        }
        # Fixed: the two implicitly-concatenated message fragments were missing a separating
        # space and rendered as "should matchexpected.".
        self.assertEqual(expected_overrides, actual_overrides, 'Slave param overrides from Git object should match '
                                                               'expected.')

    def test_slave_param_overrides_when_get_project_from_master_is_disabled(self):
        Configuration['get_project_from_master'] = False

        git = Git(url='http://original-user-specified-url.test/repo-path/repo-name')
        actual_overrides = git.slave_param_overrides()

        self.assertFalse(
            'url' in actual_overrides,
            '"url" should not be in the params to override when "get_project_from_master" is False',
        )
        self.assertFalse(
            'branch' in actual_overrides,
            '"branch" should not be in the params to override when "get_project_from_master" is False',
        )

    def _patch_popen(self, command_to_result_map=None):
        """
        Mock out calls to Popen to inject fake results for specific command strings.

        :param command_to_result_map: A dict that maps a command string regex to a _FakePopenResult object
        :type command_to_result_map: dict[str, _FakePopenResult]
        :return: The patched popen constructor mock
        :rtype: MagicMock
        """
        command_to_result_map = command_to_result_map or {}
        self.patch('app.project_type.project_type.TemporaryFile', new=lambda: Mock())
        project_type_popen_patch = self.patch('app.project_type.project_type.Popen_with_delayed_expansion')

        def fake_popen_constructor(command, stdout, stderr, *args, **kwargs):
            # First regex in the map that matches the command wins; unmatched commands
            # get a default (successful, empty-output) result.
            fake_result = _FakePopenResult()  # default value
            for command_regex in command_to_result_map:
                if re.search(command_regex, command):
                    fake_result = command_to_result_map[command_regex]
                    break
            stdout.read.return_value = fake_result.stdout.encode()
            stderr.read.return_value = fake_result.stderr.encode()
            return Mock(spec=Popen, returncode=fake_result.return_code)

        project_type_popen_patch.side_effect = fake_popen_constructor
        return project_type_popen_patch
import subprocess
import tempfile
import time
from unittest import skipIf
import yaml

from app.master.build_fsm import BuildState
from app.util import poll
from app.util.process_utils import is_windows
from test.framework.functional.base_functional_test_case import BaseFunctionalTestCase
from test.functional.job_configs import JOB_WITH_SLEEPS_90SEC, JOB_WITH_SLEEPING_ATOMIZER_90SEC


# File where the sleeping atomizer records its PID — presumably read by the
# cancellation tests to find/kill that process; TODO confirm against the job configs.
# Fixed: PEP 8 spacing around '=' (and consistency with the other copy of this constant).
ATOMIZER_PID_FILE = '/tmp/atomizer_pid.txt'

@skipIf(is_windows(), 'Fails on AppVeyor; see issue #345')
class TestBuildCancellation(BaseFunctionalTestCase):
    def test_build_cancellation_while_building(self):
        master = self.cluster.start_master()
        # Only one slave, with one executor. This means that the slave should be able to
        # theoretically finish the build in 90 seconds, as this job definition has 90 atoms,
        # with each sleeping for 1 second.
        slaves = self.cluster.start_slaves(1, num_executors_per_slave=1, start_port=43001)
        project_dir = tempfile.TemporaryDirectory()
        build_resp = master.post_new_build({
            'type': 'directory',
            'config': yaml.safe_load(JOB_WITH_SLEEPS_90SEC.config[os.name])['SleepingJob90Sec'],
            'project_directory': project_dir.name,
        })
        build_id = build_resp['build_id']
        self.assertTrue(master.block_until_build_started(build_id, timeout=30),