    def test_atoms_with_pagination_request(
            self,
            offset: Optional[int],
            limit: Optional[int],
            expected_first_atom_id: int,
            expected_last_atom_id: int,
            ):
        # Create mock atoms with ids 1 through self._NUM_ATOMS (20)
        atoms = []
        for atom_id in range(1, self._NUM_ATOMS + 1):
            atom_mock = Mock(spec=Atom)
            atom_mock.id = atom_id
            atoms.append(atom_mock)

        build_id = 1
        subjob_id = 1
        project_type = None
        job_config = None
        subjob_atoms = atoms
        subjob = Subjob(build_id, subjob_id, project_type, job_config, subjob_atoms)

        requested_atoms = subjob.get_atoms(offset, limit)

        id_of_first_atom = requested_atoms[0].id if len(requested_atoms) else None
        id_of_last_atom = requested_atoms[-1].id if len(requested_atoms) else None
        num_atoms = len(requested_atoms)

        self.assertEqual(id_of_first_atom, expected_first_atom_id, 'Received the wrong first atom from request')
        self.assertEqual(id_of_last_atom, expected_last_atom_id, 'Received the wrong last atom from request')
        if offset is not None and limit is not None:
            self.assertLessEqual(num_atoms, self._PAGINATION_MAX_LIMIT, 'Received too many atoms from request')
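The test above only pins down boundary behavior (first id, last id, capped page size), so the pagination it exercises is easy to picture. A minimal sketch, assuming plain 0-based list slicing with a hypothetical MAX_PAGE_SIZE cap; get_atoms_page and MAX_PAGE_SIZE are illustrative names, not ClusterRunner's actual implementation:

from typing import List, Optional

MAX_PAGE_SIZE = 5  # hypothetical cap, standing in for self._PAGINATION_MAX_LIMIT


def get_atoms_page(atoms: List, offset: Optional[int] = None,
                   limit: Optional[int] = None) -> List:
    """Return a slice of atoms, never exceeding MAX_PAGE_SIZE when a limit is given."""
    start = offset or 0
    if limit is None:
        return atoms[start:]
    return atoms[start:start + min(limit, MAX_PAGE_SIZE)]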
Example #2
    def start_subjob(self, subjob: Subjob):
        """
        Send a subjob of a build to this slave. The slave must have already run setup for the corresponding build.
        :param subjob: The subjob to send to this slave
        """
        if not self.is_alive():
            raise DeadSlaveError('Tried to start a subjob on a dead slave.')
        if self._is_in_shutdown_mode:
            raise SlaveMarkedForShutdownError(
                'Tried to start a subjob on a slave in shutdown mode.')

        execution_url = self._slave_api.url('build', subjob.build_id(),
                                            'subjob', subjob.subjob_id())
        post_data = {'atomic_commands': subjob.atomic_commands()}
        try:
            response = self._network.post_with_digest(execution_url,
                                                      post_data,
                                                      Secret.get(),
                                                      error_on_failure=True)
        except (requests.ConnectionError, requests.Timeout,
                RequestFailedError) as ex:
            raise SlaveCommunicationError(
                'Call to slave service failed: {}.'.format(repr(ex))) from ex

        subjob_executor_id = response.json().get('executor_id')
        analytics.record_event(analytics.MASTER_TRIGGERED_SUBJOB,
                               executor_id=subjob_executor_id,
                               build_id=subjob.build_id(),
                               subjob_id=subjob.subjob_id(),
                               slave_id=self.id)
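Because start_subjob signals failure through specific exception types (DeadSlaveError, SlaveMarkedForShutdownError, SlaveCommunicationError), a caller can decide whether to requeue the work. A hedged caller-side sketch; try_dispatch, slave, and subjob are illustrative names, and the exceptions are assumed importable from the same module as the method above:

def try_dispatch(slave, subjob) -> bool:
    """Attempt to start a subjob on a slave; return False if it should be requeued."""
    try:
        slave.start_subjob(subjob)
        return True
    except (DeadSlaveError, SlaveMarkedForShutdownError):
        # This slave cannot accept work right now; send the subjob elsewhere.
        return False
    except SlaveCommunicationError:
        # The POST to the slave service failed; the subjob was never started.
        return False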
Example #3
    def test_api_representation_matches_expected(self):
        job_config_command = 'fake command'
        subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=job_config_command),
            atoms=[
                Atom('BREAKFAST', 'pancakes', expected_time=23.4, actual_time=56.7),
                Atom('BREAKFAST', 'cereal', expected_time=89.0, actual_time=24.6),
            ],
        )

        actual_api_repr = subjob.api_representation()

        expected_api_repr = {
            'id': 34,
            'command': job_config_command,
            'atoms': [
                {
                    'id': 0,
                    'atom': get_environment_variable_setter_command('BREAKFAST', 'pancakes'),
                    'expected_time': 23.4,
                    'actual_time': 56.7,
                },
                {
                    'id': 1,
                    'atom': get_environment_variable_setter_command('BREAKFAST', 'cereal'),
                    'expected_time': 89.0,
                    'actual_time': 24.6,
                },
            ]
        }
        self.assertEqual(actual_api_repr, expected_api_repr, 'Actual api representation should match expected.')
Example #4
    def setUp(self):
        super().setUp()
        self._job_config_command = 'fake command'
        self._subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=self._job_config_command),
            atoms=[
                Atom(
                    'export BREAKFAST="pancakes";',
                    expected_time=23.4,
                    actual_time=56.7,
                    exit_code=1,
                    state=AtomState.NOT_STARTED,
                    atom_id=0,
                ),
                Atom(
                    'export BREAKFAST="cereal";',
                    expected_time=89.0,
                    actual_time=24.6,
                    exit_code=0,
                    state=AtomState.NOT_STARTED,
                    atom_id=1,
                ),
            ],
        )
Example #5
    def test_api_representation_matches_expected(self):
        job_config_command = 'fake command'
        subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=job_config_command),
            atoms=[
                Atom('BREAKFAST',
                     'pancakes',
                     expected_time=23.4,
                     actual_time=56.7),
                Atom('BREAKFAST',
                     'cereal',
                     expected_time=89.0,
                     actual_time=24.6),
            ],
        )

        actual_api_repr = subjob.api_representation()

        expected_api_repr = {
            'id': 34,
            'command': job_config_command,
            'atoms': [
                {
                    'id': 0,
                    'atom': get_environment_variable_setter_command('BREAKFAST', 'pancakes'),
                    'expected_time': 23.4,
                    'actual_time': 56.7,
                },
                {
                    'id': 1,
                    'atom': get_environment_variable_setter_command('BREAKFAST', 'cereal'),
                    'expected_time': 89.0,
                    'actual_time': 24.6,
                },
            ]
        }
        self.assertEqual(actual_api_repr, expected_api_repr,
                         'Actual api representation should match expected.')
Example #6
def compute_subjobs_for_build(build_id, job_config, project_type):
    """
    Calculate subjobs for a build.
    :type build_id: int
    :type job_config: JobConfig
    :param project_type: the project_type that the build is running in
    :type project_type: project_type.project_type.ProjectType
    :rtype: list[Subjob]
    """
    # Users can override the list of atoms to be run in this build. If the atoms_override
    # was specified, we can skip the atomization step and use those overridden atoms instead.
    if project_type.atoms_override is not None:
        atoms_string_list = project_type.atoms_override
        atoms_list = [
            Atom(atom_string_value) for atom_string_value in atoms_string_list
        ]
    else:
        atoms_list = job_config.atomizer.atomize_in_project(project_type)

    # Group the atoms together using some grouping strategy
    timing_file_path = project_type.timing_file_path(job_config.name)
    grouped_atoms = _grouped_atoms(atoms_list, job_config.max_executors,
                                   timing_file_path,
                                   project_type.project_directory)

    # Generate subjobs for each group of atoms
    subjobs = []
    for subjob_id, subjob_atoms in enumerate(grouped_atoms):
        # The atom id isn't calculated until the atom has been grouped into a subjob.
        for atom_id, atom in enumerate(subjob_atoms):
            atom.id = atom_id
        subjobs.append(
            Subjob(build_id, subjob_id, project_type, job_config,
                   subjob_atoms))
    return subjobs
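The comment "Group the atoms together using some grouping strategy" is the key step: _grouped_atoms must return a list of atom groups, one group per future subjob. A simplified, hypothetical stand-in that just chunks atoms evenly; the real strategy also uses the timing file, but the returned shape is the same:

def chunk_atoms_evenly(atoms_list, max_executors):
    """Illustrative grouping only: split atoms into roughly equal chunks."""
    group_size = max(1, -(-len(atoms_list) // max_executors))  # ceiling division
    return [atoms_list[i:i + group_size]
            for i in range(0, len(atoms_list), group_size)]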
Example #7
    def _compute_subjobs_for_build(self, build_id, job_config, project_type):
        """

        :type build_id: int
        :type job_config: JobConfig
        :param project_type: the docker, directory, or git repo project_type that this build is running in
        :type project_type: project_type.project_type.ProjectType
        :rtype: list[Subjob]
        """
        try:
            atoms_list = job_config.atomizer.atomize_in_project(project_type)
        except AtomizerError as ex:
            raise BuildProjectError('Build failed during atomization.') from ex

        # Group the atoms together using some grouping strategy
        timing_file_path = project_type.timing_file_path(job_config.name)
        grouped_atoms = self._grouped_atoms(atoms_list,
                                            job_config.max_executors,
                                            timing_file_path,
                                            project_type.project_directory)

        # Generate subjobs for each group of atoms
        subjobs = []
        for subjob_id in range(len(grouped_atoms)):
            atoms = grouped_atoms[subjob_id]
            subjobs.append(
                Subjob(build_id, subjob_id, project_type, job_config, atoms))
        return subjobs
Example #8
    def setUp(self):
        super().setUp()
        self._job_config_command = 'fake command'
        self._subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=self._job_config_command),
            atoms=[
                Atom(
                    'export BREAKFAST="pancakes";',
                    expected_time=23.4,
                    actual_time=56.7,
                    exit_code=1,
                    state=AtomState.NOT_STARTED,
                    atom_id=0,
                ),
                Atom(
                    'export BREAKFAST="cereal";',
                    expected_time=89.0,
                    actual_time=24.6,
                    exit_code=0,
                    state=AtomState.NOT_STARTED,
                    atom_id=1,
                ),
            ],
        )
Example #9
    def _create_subjobs(self, count=3):
        return [
            Subjob(build_id=0,
                   subjob_id=i,
                   project_type=None,
                   job_config=None,
                   atoms=[]) for i in range(count)
        ]
Example #10
    def test_subjob_constructor_sets_subjob_id_on_atoms(self):
        atoms = [Mock(), Mock()]
        Subjob(build_id=1,
               subjob_id=4,
               project_type=Mock(),
               job_config=Mock(),
               atoms=atoms)
        for atom in atoms:
            self.assertEqual(atom.subjob_id, 4)
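The test above only asserts that construction stamps subjob_id onto each atom. A minimal sketch of that behavior, assuming nothing beyond what the test checks; SubjobSketch is hypothetical, not the actual Subjob.__init__:

class SubjobSketch:
    """Hypothetical constructor illustrating the invariant under test."""

    def __init__(self, build_id, subjob_id, project_type, job_config, atoms):
        self._build_id = build_id
        self._subjob_id = subjob_id
        self._project_type = project_type
        self._job_config = job_config
        self._atoms = atoms
        for atom in atoms:
            atom.subjob_id = subjob_id  # the behavior the test verifies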
Example #11
    def _create_test_subjob(
            self, build_id=1234, subjob_id=456, project_type=None, job_config=None, atoms=None,
    ) -> Subjob:
        """Create a subjob for testing."""
        return Subjob(
            build_id=build_id,
            subjob_id=subjob_id,
            project_type=project_type or Mock(),
            job_config=job_config or Mock(),
            atoms=atoms or [Mock()],
        )
Example #12
    def start_subjob(self, subjob: Subjob):
        """
        Send a subjob of a build to this slave. The slave must have already run setup for the corresponding build.
        :param subjob: The subjob to send to this slave
        """
        if not self.is_alive():
            raise DeadSlaveError('Tried to start a subjob on a dead slave.')
        if self._is_in_shutdown_mode:
            raise SlaveMarkedForShutdownError('Tried to start a subjob on a slave in shutdown mode.')

        execution_url = self._slave_api.url('build', subjob.build_id(), 'subjob', subjob.subjob_id())
        post_data = {'atomic_commands': subjob.atomic_commands()}
        try:
            response = self._network.post_with_digest(execution_url, post_data, Secret.get(), error_on_failure=True)
        except (requests.ConnectionError, requests.Timeout, RequestFailedError) as ex:
            raise SlaveCommunicationError('Call to slave service failed: {}.'.format(repr(ex))) from ex

        subjob_executor_id = response.json().get('executor_id')
        analytics.record_event(analytics.MASTER_TRIGGERED_SUBJOB, executor_id=subjob_executor_id,
                               build_id=subjob.build_id(), subjob_id=subjob.subjob_id(), slave_id=self.id)
Example #13
    def _create_subjobs(self, count=3, num_atoms_each=1, build_id=0, job_config=None):
        return [
            Subjob(
                build_id=build_id,
                subjob_id=i,
                project_type=None,
                job_config=job_config,
                atoms=[Atom('NAME=Leonardo') for _ in range(num_atoms_each)],
            )
            for i in range(count)
        ]
Example #14
    @classmethod
    def load_from_db(cls, build_id):
        """
        Given a build_id, fetch all the stored information from the database to reconstruct
        a Build object to represent that build.
        :param build_id: The id of the build to recreate.
        """
        with Connection.get() as session:
            build_schema = session.query(BuildSchema).filter(BuildSchema.build_id == build_id).first()
            failed_artifact_directories_schema = session.query(FailedArtifactDirectoriesSchema) \
                .filter(FailedArtifactDirectoriesSchema.build_id == build_id) \
                .all()
            failed_subjob_atom_pairs_schema = session.query(FailedSubjobAtomPairsSchema) \
                .filter(FailedSubjobAtomPairsSchema.build_id == build_id) \
                .all()
            atoms_schema = session.query(AtomsSchema).filter(AtomsSchema.build_id == build_id).all()
            subjobs_schema = session.query(SubjobsSchema).filter(SubjobsSchema.build_id == build_id).all()

            # If a query returns None, then we know the build wasn't found in the database
            if not build_schema:
                return None

            build_parameters = json.loads(build_schema.build_parameters)

            # Generate a BuildRequest object with our query response
            build_request = BuildRequest(build_parameters)

            # Create the initial Build object; we will alter its state as more data is loaded
            build = Build(build_request)
            build._build_id = build_id

            # Manually generate ProjectType object for build and create a `job_config` since this is usually done in `prepare()`
            build.generate_project_type()
            job_config = build.project_type.job_config()

            # Manually update build data
            build._artifacts_tar_file = build_schema.artifacts_tar_file
            build._artifacts_zip_file = build_schema.artifacts_zip_file
            build._error_message = build_schema.error_message
            build._postbuild_tasks_are_finished = bool(int(build_schema.postbuild_tasks_are_finished))
            build.setup_failures = build_schema.setup_failures
            build._timing_file_path = build_schema.timing_file_path

            # Manually set the state machine timestamps
            build._state_machine._transition_timestamps = {
                BuildState.QUEUED: build_schema.queued_ts,
                BuildState.FINISHED: build_schema.finished_ts,
                BuildState.PREPARED: build_schema.prepared_ts,
                BuildState.PREPARING: build_schema.preparing_ts,
                BuildState.ERROR: build_schema.error_ts,
                BuildState.CANCELED: build_schema.canceled_ts,
                BuildState.BUILDING: build_schema.building_ts
            }
            build._state_machine._fsm.current = BuildState[build_schema.state]

            build_artifact = BuildArtifact(build_schema.build_artifact_dir)

            directories = []
            for directory in failed_artifact_directories_schema:
                directories.append(directory.failed_artifact_directory)
            build_artifact._failed_artifact_directories = directories

            pairs = []
            for pair in failed_subjob_atom_pairs_schema:
                pairs.append((pair.subjob_id, pair.atom_id))
            build_artifact._q_failed_subjob_atom_pairs = pairs

            build._build_artifact = build_artifact

            atoms_by_subjob_id = {}
            for atom in atoms_schema:
                atoms_by_subjob_id.setdefault(atom.subjob_id, [])
                atoms_by_subjob_id[atom.subjob_id].append(Atom(
                    atom.command_string,
                    atom.expected_time,
                    atom.actual_time,
                    atom.exit_code,
                    atom.state,
                    atom.atom_id,
                    atom.subjob_id
                ))

            subjobs = OrderedDict()
            for subjob in subjobs_schema:
                atoms = atoms_by_subjob_id[subjob.subjob_id]
                # Add atoms after subjob is created so we don't alter their state on initialization
                subjob_to_add = Subjob(build_id, subjob.subjob_id, build.project_type, job_config, [])
                subjob_to_add._atoms = atoms
                subjob_to_add.completed = subjob.completed
                subjobs[subjob.subjob_id] = subjob_to_add
            build._all_subjobs_by_id = subjobs

            # Place subjobs into correct queues within the build
            build._unstarted_subjobs = Queue(maxsize=len(subjobs))
            build._finished_subjobs = Queue(maxsize=len(subjobs))
            for _, subjob in subjobs.items():
                if subjob.completed:
                    build._finished_subjobs.put(subjob)
                else:
                    build._unstarted_subjobs.put(subjob)

            return build
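Taken together, load_from_db returns either a fully rehydrated Build or None when no row matches the build id. A hedged usage sketch, assuming it is the classmethod on Build that the cls parameter suggests; the build id 123 is illustrative, and only attributes populated by load_from_db itself are touched:

build = Build.load_from_db(123)
if build is None:
    print('Build 123 was never persisted')
else:
    for subjob_id, subjob in build._all_subjobs_by_id.items():
        print(subjob_id, 'completed' if subjob.completed else 'pending')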
Example #15
class TestSubjob(BaseUnitTestCase):
    def setUp(self):
        super().setUp()
        self._job_config_command = 'fake command'
        self._subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=self._job_config_command),
            atoms=[
                Atom(
                    'export BREAKFAST="pancakes";',
                    expected_time=23.4,
                    actual_time=56.7,
                    exit_code=1,
                    state=AtomState.NOT_STARTED,
                    atom_id=0,
                ),
                Atom(
                    'export BREAKFAST="cereal";',
                    expected_time=89.0,
                    actual_time=24.6,
                    exit_code=0,
                    state=AtomState.NOT_STARTED,
                    atom_id=1,
                ),
            ],
        )

    def test_subjob_constructor_sets_subjob_id_on_atoms(self):
        atoms = [Mock(), Mock()]
        Subjob(build_id=1,
               subjob_id=4,
               project_type=Mock(),
               job_config=Mock(),
               atoms=atoms)
        for atom in atoms:
            self.assertEqual(atom.subjob_id, 4)

    def test_api_representation_matches_expected(self):
        actual_api_repr = self._subjob.api_representation()

        expected_api_repr = {
            'id': 34,
            'command': self._job_config_command,
            'slave': None,
            'atoms': [
                {
                    'id': 0,
                    'command_string': 'export BREAKFAST="pancakes";',
                    'expected_time': 23.4,
                    'actual_time': 56.7,
                    'exit_code': 1,
                    'state': 'NOT_STARTED',
                    'subjob_id': 34
                },
                {
                    'id': 1,
                    'command_string': 'export BREAKFAST="cereal";',
                    'expected_time': 89.0,
                    'actual_time': 24.6,
                    'exit_code': 0,
                    'state': 'NOT_STARTED',
                    'subjob_id': 34
                },
            ]
        }
        self.assertEqual(actual_api_repr, expected_api_repr,
                         'Actual api representation should match expected.')

    def _assert_atoms_are_in_state(self, api_repr, state_str):
        for atom_dict in api_repr['atoms']:
            self.assertEqual(atom_dict['state'], state_str)

    def test_mark_in_progress_marks_all_atoms_in_progress(self):
        self._subjob.mark_in_progress(None)
        actual_api_repr = self._subjob.api_representation()
        self._assert_atoms_are_in_state(actual_api_repr, 'IN_PROGRESS')

    def test_mark_completed_marks_all_atoms_completed(self):
        self._subjob.mark_completed()
        actual_api_repr = self._subjob.api_representation()
        self._assert_atoms_are_in_state(actual_api_repr, 'COMPLETED')
Example #16
class TestSubjob(BaseUnitTestCase):

    def setUp(self):
        super().setUp()
        self._job_config_command = 'fake command'
        self._subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=self._job_config_command),
            atoms=[
                Atom(
                    'export BREAKFAST="pancakes";',
                    expected_time=23.4,
                    actual_time=56.7,
                    exit_code=1,
                    state=AtomState.NOT_STARTED,
                    atom_id=0,
                ),
                Atom(
                    'export BREAKFAST="cereal";',
                    expected_time=89.0,
                    actual_time=24.6,
                    exit_code=0,
                    state=AtomState.NOT_STARTED,
                    atom_id=1,
                ),
            ],
        )

    def test_subjob_constructor_sets_subjob_id_on_atoms(self):
        atoms = [Mock(), Mock()]
        Subjob(build_id=1, subjob_id=4, project_type=Mock(), job_config=Mock(), atoms=atoms)
        for atom in atoms:
            self.assertEqual(atom.subjob_id, 4)

    def test_api_representation_matches_expected(self):
        actual_api_repr = self._subjob.api_representation()

        expected_api_repr = {
            'id': 34,
            'command': self._job_config_command,
            'slave': None,
            'atoms': [
                {
                    'id': 0,
                    'command_string': 'export BREAKFAST="pancakes";',
                    'expected_time': 23.4,
                    'actual_time': 56.7,
                    'exit_code': 1,
                    'state': 'NOT_STARTED',
                    'subjob_id': 34
                },
                {
                    'id': 1,
                    'command_string': 'export BREAKFAST="cereal";',
                    'expected_time': 89.0,
                    'actual_time': 24.6,
                    'exit_code': 0,
                    'state': 'NOT_STARTED',
                    'subjob_id': 34
                },
            ]
        }
        self.assertEqual(actual_api_repr, expected_api_repr, 'Actual api representation should match expected.')

    def _assert_atoms_are_in_state(self, api_repr, state_str):
        for atom_dict in api_repr['atoms']:
            self.assertEqual(atom_dict['state'], state_str)

    def test_mark_in_progress_marks_all_atoms_in_progress(self):
        self._subjob.mark_in_progress(None)
        actual_api_repr = self._subjob.api_representation()
        self._assert_atoms_are_in_state(actual_api_repr, 'IN_PROGRESS')

    def test_mark_completed_marks_all_atoms_completed(self):
        self._subjob.mark_completed()
        actual_api_repr = self._subjob.api_representation()
        self._assert_atoms_are_in_state(actual_api_repr, 'COMPLETED')
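The last two tests only observe atom state through api_representation(), which implies that mark_in_progress and mark_completed propagate a state to every atom. A hedged sketch of that propagation (not the real Subjob methods; AtomState.IN_PROGRESS and AtomState.COMPLETED are inferred from the asserted state strings):

def _mark_all_atoms(subjob, new_state):
    """Illustrative helper: flip every atom in the subjob to new_state."""
    for atom in subjob._atoms:
        atom.state = new_state

# mark_in_progress(...) would call this with AtomState.IN_PROGRESS,
# and mark_completed() with AtomState.COMPLETED.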