def handle_request_for_new_build(self, build_params):
    """
    Create a new Build from the given parameters and enqueue it for processing.

    :param build_params: Parameters describing the requested build
    :type build_params: dict[str, str]
    :return: A (success, response) pair; on success the response carries the new
        build id, otherwise it carries an error message.
    :rtype: tuple[bool, dict[str, str]]
    """
    build_request = BuildRequest(build_params)

    # Guard clauses: reject invalid requests before touching any shared state.
    if not build_request.is_valid():
        if not build_request.is_valid_type():
            return False, {'error': 'Invalid build request type.'}
        required_params = build_request.required_parameters()
        return False, {
            'error': 'Missing required parameter. Required parameters: {}'.format(required_params)}

    build = Build(build_request)
    self._all_builds_by_id[build.build_id()] = build
    self._request_queue.put(build)
    analytics.record_event(analytics.BUILD_REQUEST_QUEUED, build_id=build.build_id())
    return True, {'build_id': build.build_id()}
def handle_request_for_new_build(self, build_params):
    """
    Create a new Build from the supplied parameters and hand it off to the
    build request handler for processing.

    :param build_params: Parameters describing the requested build
    :type build_params: dict[str, str]
    :return: A (success, response) pair; on success the response carries the new
        build id, otherwise it carries an error message.
    :rtype: tuple[bool, dict[str, str]]
    """
    # todo: refactor to use exception instead of boolean
    build_request = BuildRequest(build_params)

    if build_request.is_valid():
        build = Build(build_request)
        self._all_builds_by_id[build.build_id()] = build
        build.generate_project_type()
        self._build_request_handler.handle_build_request(build)
        return True, {'build_id': build.build_id()}

    if not build_request.is_valid_type():
        return False, {'error': 'Invalid build request type.'}

    missing_param_error = 'Missing required parameter. Required parameters: {}'.format(
        build_request.required_parameters())
    return False, {'error': missing_param_error}
def handle_request_for_new_build(self, build_params):
    """
    Validate the incoming build parameters and, if acceptable, create a new
    Build and pass it to the build request handler.

    :param build_params: Parameters describing the requested build
    :type build_params: dict[str, str]
    :return: A (success, response) pair; on success the response carries the new
        build id, otherwise it carries an error message.
    :rtype: tuple[bool, dict[str, str]]
    """
    build_request = BuildRequest(build_params)

    if build_request.is_valid():
        build = Build(build_request)
        self._all_builds_by_id[build.build_id()] = build
        # WIP(joey): This should be internal to the Build object.
        build.generate_project_type()
        self._build_request_handler.handle_build_request(build)
        return True, {'build_id': build.build_id()}

    # Request was invalid -- pick the appropriate error message.
    if not build_request.is_valid_type():
        error_message = 'Invalid build request type.'
    else:
        error_message = 'Missing required parameter. Required parameters: {}'.format(
            build_request.required_parameters())

    # todo: refactor to use exception instead of boolean
    return False, {'error': error_message}
def setup(self, build: Build, executor_start_index: int) -> bool:
    """
    Trigger the setup command for the specified build on this slave. Setup
    executes asynchronously on the slave; the slave alerts the master when
    setup is complete and it is ready to start working on subjobs.

    :param build: The build to set up this slave to work on
    :param executor_start_index: The index the slave should number its executors from for this build
    :return: Whether or not the call to start setup on the slave was successful
    """
    # Merge the build's own parameters with any per-slave overrides from the project type.
    project_type_params = build.build_request.build_parameters().copy()
    project_type_params.update(build.project_type.slave_param_overrides())

    request_payload = {
        'project_type_params': project_type_params,
        'build_executor_start_index': executor_start_index,
    }
    setup_url = self._slave_api.url('build', build.build_id(), 'setup')
    self.current_build_id = build.build_id()

    try:
        self._network.post_with_digest(setup_url, request_payload, Secret.get())
    except (requests.ConnectionError, requests.Timeout) as ex:
        # A slave we cannot reach is treated as dead.
        self._logger.warning('Setup call to {} failed with {}: {}.', self, ex.__class__.__name__, str(ex))
        self.mark_dead()
        return False
    return True
def setup(self, build: Build, executor_start_index: int) -> bool:
    """
    Ask this slave to run setup for the given build. The setup process runs
    asynchronously on the slave, which will notify the master once it is
    ready to begin executing subjobs.

    :param build: The build to set up this slave to work on
    :param executor_start_index: The index the slave should number its executors from for this build
    :return: Whether or not the call to start setup on the slave was successful
    """
    param_overrides = build.project_type.slave_param_overrides()
    slave_params = build.build_request.build_parameters().copy()
    slave_params.update(param_overrides)

    url = self._slave_api.url('build', build.build_id(), 'setup')
    body = {
        'project_type_params': slave_params,
        'build_executor_start_index': executor_start_index,
    }
    self.current_build_id = build.build_id()

    try:
        self._network.post_with_digest(url, body, Secret.get())
    except (requests.ConnectionError, requests.Timeout) as ex:
        self._logger.warning('Setup call to {} failed with {}: {}.', self, type(ex).__name__, str(ex))
        # Unreachable slaves are marked dead so no further work is scheduled on them.
        self.mark_dead()
        return False
    else:
        return True
def test_deserialized_build_api_representation_is_same_as_original_build_no_failures(self):
    """Round-tripping a build through the store should preserve its API representation."""
    build_request = BuildRequest({
        'type': 'git',
        'url': 'git@name/repo.git',
        'job_name': 'Example',
    })
    build = Build(build_request)
    build.generate_project_type()
    BuildStore.add(build)

    reloaded_build = Build.load_from_db(build.build_id())
    diff = self._compare_dictionaries_with_same_keys(
        build.api_representation(), reloaded_build.api_representation())

    # The build_project_directory is an auto generated tmp directory -- these will never be the same
    diff.pop('request_params|build_project_directory', None)

    # This is very similar to self.assertDictEqual, but here we won't consider different key orderings
    # as "not equal" which matters because `api_representation` does not have deterministic ordering
    self.assertEqual(diff, {}, 'Deserialized build is not the same as the original build.')
def test_exception_is_raised_if_problem_occurs_writing_subjob(self):
    """complete_subjob should propagate an error when writing the payload fails."""
    Configuration['results_directory'] = abspath(join('some', 'temp', 'directory'))
    build = Build(BuildRequest({}))
    build._project_type = self._create_mock_project_type()
    subjob = self._create_subjobs(count=1, build_id=build.build_id())[0]
    build.prepare([subjob], self._create_job_config())

    # Simulate the filesystem refusing to write the subjob results file.
    self.mock_fs.write_file.side_effect = FileExistsError
    subjob_payload = {'filename': 'turtles.txt', 'body': 'Heroes in a half shell.'}

    with self.assertRaises(Exception):
        build.complete_subjob(subjob.subjob_id(), payload=subjob_payload)
def test_complete_subjob_writes_and_extracts_payload_to_correct_directory(self):
    """Completing a subjob should write its payload under the results dir and untar it."""
    Configuration['results_directory'] = '/tmp/results'
    build = Build(BuildRequest({}))
    build._project_type = self._create_mock_project_type()
    subjob = self._create_subjobs(count=1, build_id=build.build_id())[0]
    build.prepare([subjob], self._create_job_config())

    build.complete_subjob(
        subjob.subjob_id(),
        payload={'filename': 'turtles.txt', 'body': 'Heroes in a half shell.'})

    expected_results_path = '/tmp/results/1/turtles.txt'
    self.mock_fs.write_file.assert_called_once_with('Heroes in a half shell.', expected_results_path)
    self.mock_fs.extract_tar.assert_called_once_with(expected_results_path, delete=True)
def test_exception_is_raised_if_problem_occurs_writing_subjob(self):
    """A failure while writing subjob results should surface as an exception."""
    Configuration['results_directory'] = '/tmp/results'
    build = Build(BuildRequest({}))
    build._project_type = self._create_mock_project_type()
    only_subjob = self._create_subjobs(count=1, build_id=build.build_id())[0]
    build.prepare([only_subjob], self._create_job_config())

    # Make the mocked filesystem raise when the payload is written.
    self.mock_fs.write_file.side_effect = FileExistsError

    with self.assertRaises(Exception):
        build.complete_subjob(
            only_subjob.subjob_id(),
            payload={'filename': 'turtles.txt', 'body': 'Heroes in a half shell.'})
def test_allocate_slave_calls_slave_setup(self):
    """Allocating a slave to a build should trigger setup on that slave."""
    fake_setup_command = 'docker pull my:leg'
    subjobs = self._create_subjobs()
    mock_project_type = self._create_mock_project_type()
    mock_slave = self._create_mock_slave()

    build = Build(BuildRequest({'setup': fake_setup_command}))
    build.prepare(subjobs, mock_project_type, self._create_job_config(self._FAKE_MAX_EXECUTORS))
    build.allocate_slave(mock_slave)

    mock_slave.setup.assert_called_once_with(
        build.build_id(), project_type_params={'setup': fake_setup_command})
def test_allocate_slave_calls_slave_setup(self):
    """Slave setup should be invoked exactly once when a slave is allocated."""
    setup_command = 'docker pull my:leg'
    mock_slave = self._create_mock_slave()

    build = Build(BuildRequest({'setup': setup_command}))
    build.prepare(self._create_subjobs(), self._create_mock_project_type(), self._create_job_config())
    build.allocate_slave(mock_slave)

    mock_slave.setup.assert_called_once_with(
        build.build_id(), project_type_params={'setup': setup_command})
def test_complete_subjob_writes_and_extracts_payload_to_correct_directory(self):
    """The subjob payload should land in the build's results directory and be extracted."""
    Configuration['results_directory'] = '/tmp/results'
    build = Build(BuildRequest({}))
    build._project_type = self._create_mock_project_type()
    first_subjob = self._create_subjobs(count=1, build_id=build.build_id())[0]
    build.prepare([first_subjob], self._create_job_config())

    payload = {'filename': 'turtles.txt', 'body': 'Heroes in a half shell.'}
    build.complete_subjob(first_subjob.subjob_id(), payload=payload)

    results_tarball = '/tmp/results/1/turtles.txt'
    self.mock_fs.write_file.assert_called_once_with('Heroes in a half shell.', results_tarball)
    self.mock_fs.extract_tar.assert_called_once_with(results_tarball, delete=True)
def add(cls, build: Build):
    """
    Add a new build to the in-memory collection, keyed by its build id.
    Note: an existing entry with the same build id is silently replaced.

    :param build: The build to add to the store
    """
    cls._all_builds_by_id[build.build_id()] = build
def test_add_build_to_store_sets_build_id(self, expected_build_id):
    """Adding a build to the store should leave it with the expected build id."""
    new_build = Build(BuildRequest({}))
    BuildStore.add(new_build)

    actual_build_id = new_build.build_id()
    self.assertEqual(actual_build_id, expected_build_id, 'The wrong build_id was set.')