class TestSubjobCalculator(BaseUnitTestCase):
    @genty_dataset(
        atoms_override_specified=(['override1', 'override2'], None, False),
        atoms_override_not_specified=(None, [
            Atom('atom_value_1'), Atom('atom_value_2')
        ], True),
    )
    def test_compute_subjobs_for_build_only_atomizes_if_override_not_specified(
            self, atoms_override, atomizer_output, atomizer_called):
        """
        :type atoms_override: list[str] | None
        :type atomizer_output: list[Atom] | None
        :type atomizer_called: bool
        """
        self.patch('os.path.isfile').return_value = False
        mock_project = Mock(spec_set=ProjectType())
        mock_project.atoms_override = atoms_override
        mock_project.timing_file_path.return_value = '/some/path/doesnt/matter'
        mock_project.project_directory = '/some/project/directory'
        mock_atomizer = Mock(spec_set=Atomizer)
        mock_atomizer.atomize_in_project.return_value = atomizer_output
        mock_job_config = Mock(spec=JobConfig)
        mock_job_config.name = 'some_config'
        mock_job_config.max_executors = 1
        mock_job_config.atomizer = mock_atomizer

        subjob_calculator = SubjobCalculator()
        subjob_calculator.compute_subjobs_for_build(build_id=1,
                                                    job_config=mock_job_config,
                                                    project_type=mock_project)

        self.assertEqual(mock_atomizer.atomize_in_project.called,
                         atomizer_called)
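A hedged aside on the final assertion (not part of the original test): unittest.mock's assert_called_once_with / assert_not_called make the intent more explicit than comparing the boolean `called` attribute, and the first also verifies the argument, assuming the production code passes the project_type through positionally as in the compute_subjobs_for_build snippet further down this page.

        # Drop-in alternative for the assertion at the end of the test body:
        if atomizer_called:
            mock_atomizer.atomize_in_project.assert_called_once_with(mock_project)
        else:
            mock_atomizer.atomize_in_project.assert_not_called()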
Example #2
    def setUp(self):
        super().setUp()
        self._job_config_command = 'fake command'
        self._subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=self._job_config_command),
            atoms=[
                Atom(
                    'export BREAKFAST="pancakes";',
                    expected_time=23.4,
                    actual_time=56.7,
                    exit_code=1,
                    state=AtomState.NOT_STARTED,
                    atom_id=0,
                ),
                Atom(
                    'export BREAKFAST="cereal";',
                    expected_time=89.0,
                    actual_time=24.6,
                    exit_code=0,
                    state=AtomState.NOT_STARTED,
                    atom_id=1,
                ),
            ],
        )
Example #3
    def test_api_representation_matches_expected(self):
        job_config_command = 'fake command'
        subjob = Subjob(
            build_id=12,
            subjob_id=34,
            project_type=Mock(spec_set=ProjectType),
            job_config=Mock(spec=JobConfig, command=job_config_command),
            atoms=[
                Atom('BREAKFAST',
                     'pancakes',
                     expected_time=23.4,
                     actual_time=56.7),
                Atom('BREAKFAST',
                     'cereal',
                     expected_time=89.0,
                     actual_time=24.6),
            ],
        )

        actual_api_repr = subjob.api_representation()

        expected_api_repr = {
            'id': 34,
            'command': job_config_command,
            'atoms': [
                {
                    'id': 0,
                    'atom': get_environment_variable_setter_command('BREAKFAST', 'pancakes'),
                    'expected_time': 23.4,
                    'actual_time': 56.7,
                },
                {
                    'id': 1,
                    'atom': get_environment_variable_setter_command('BREAKFAST', 'cereal'),
                    'expected_time': 89.0,
                    'actual_time': 24.6,
                },
            ],
        }
        self.assertEqual(actual_api_repr, expected_api_repr,
                         'Actual api representation should match expected.')
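The expected values above hinge on get_environment_variable_setter_command(), whose implementation is not shown on this page. Judging from the literal atom strings used in the setUp example earlier ('export BREAKFAST="pancakes";'), a minimal sketch of what it plausibly returns is:

def get_environment_variable_setter_command(variable_name, value):
    """Render a shell statement exporting `variable_name` with the given value (sketch, not the real implementation)."""
    return 'export {}="{}";'.format(variable_name, value)

# e.g. get_environment_variable_setter_command('BREAKFAST', 'pancakes')
#      -> 'export BREAKFAST="pancakes";'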
Example #4
def compute_subjobs_for_build(build_id, job_config, project_type):
    """
    Calculate subjobs for a build.
    :type build_id: int
    :type job_config: JobConfig
    :param project_type: the project_type that the build is running in
    :type project_type: project_type.project_type.ProjectType
    :rtype: list[Subjob]
    """
    # Users can override the list of atoms to be run in this build. If the atoms_override
    # was specified, we can skip the atomization step and use those overridden atoms instead.
    if project_type.atoms_override is not None:
        atoms_string_list = project_type.atoms_override
        atoms_list = [
            Atom(atom_string_value) for atom_string_value in atoms_string_list
        ]
    else:
        atoms_list = job_config.atomizer.atomize_in_project(project_type)

    # Group the atoms together using some grouping strategy
    timing_file_path = project_type.timing_file_path(job_config.name)
    grouped_atoms = _grouped_atoms(atoms_list, job_config.max_executors,
                                   timing_file_path,
                                   project_type.project_directory)

    # Generate subjobs for each group of atoms
    subjobs = []
    for subjob_id, subjob_atoms in enumerate(grouped_atoms):
        # The atom id isn't calculated until the atom has been grouped into a subjob.
        for atom_id, atom in enumerate(subjob_atoms):
            atom.id = atom_id
        subjobs.append(
            Subjob(build_id, subjob_id, project_type, job_config,
                   subjob_atoms))
    return subjobs
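A hedged usage sketch for the function above (not taken from the source). It reuses the mocking style of the unit test at the top of this page; with atoms_override set the atomizer is skipped, and pointing timing_file_path at a file that does not exist should make _grouped_atoms fall back to its default grouping (an assumption about that helper).

from unittest.mock import Mock

mock_project = Mock(spec_set=ProjectType())
mock_project.atoms_override = ['export TARGET="a";', 'export TARGET="b";']  # skip atomization
mock_project.timing_file_path.return_value = '/tmp/nonexistent_timing_file'
mock_project.project_directory = '/tmp/project'

mock_job_config = Mock(spec=JobConfig)
mock_job_config.name = 'some_config'  # Mock's `name` kwarg is reserved, so assign it afterwards
mock_job_config.max_executors = 1

subjobs = compute_subjobs_for_build(build_id=7, job_config=mock_job_config, project_type=mock_project)
# Each returned Subjob carries the build id, its own subjob id, and one group of atoms.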
Example #5
    def atomize_in_project(self, project_type):
        """
        Translate the atomizer dicts that this instance was initialized with into a list of actual atom commands. This
        executes atomizer commands inside the given project in order to generate the atoms.

        :param project_type: The ProjectType instance in which to execute the atomizer commands
        :type project_type: ProjectType
        :return: The list of environment variable "export" atom commands
        :rtype: list[app.master.atom.Atom]
        """
        atoms_list = []
        for atomizer_dict in self._atomizer_dicts:
            for atomizer_var_name, atomizer_command in atomizer_dict.items():
                atomizer_output, exit_code = project_type.execute_command_in_project(
                    atomizer_command)
                if exit_code != 0:
                    self._logger.error(
                        'Atomizer command "{}" for variable "{}" failed with exit code: {} and output:'
                        '\n{}', atomizer_command, atomizer_var_name, exit_code,
                        atomizer_output)
                    raise AtomizerError('Atomizer command failed!')

                new_atoms = []
                for atom_value in atomizer_output.strip().splitlines():
                    # For purposes of matching atom string values across builds, we must replace the generated/unique
                    # project directory with its corresponding universal environment variable: '$PROJECT_DIR'.
                    atom_value = atom_value.replace(
                        project_type.project_directory, '$PROJECT_DIR')
                    new_atoms.append(
                        Atom(
                            get_environment_variable_setter_command(
                                atomizer_var_name, atom_value)))
                atoms_list.extend(new_atoms)

        return atoms_list
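A hedged illustration of the flow described in the docstring (the Atomizer constructor shape is an assumption, as is the export rendering, which follows the sketch after Example #3): give the atomizer a fake project whose command output lists two test files, and two atoms should come back with the project directory rewritten to $PROJECT_DIR.

from unittest.mock import Mock

mock_project = Mock(spec_set=ProjectType())
mock_project.project_directory = '/tmp/project'
mock_project.execute_command_in_project.return_value = (
    '/tmp/project/tests/test_foo.py\n/tmp/project/tests/test_bar.py\n', 0)

atomizer = Atomizer([{'TESTPATH': 'find tests/ -name "test_*.py"'}])  # assumed constructor signature
atoms = atomizer.atomize_in_project(mock_project)
# Expected command strings for the two atoms:
#   export TESTPATH="$PROJECT_DIR/tests/test_foo.py";
#   export TESTPATH="$PROJECT_DIR/tests/test_bar.py";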
Example #6
    def _create_subjobs(self, count=3, num_atoms_each=1, build_id=0, job_config=None):
        return [
            Subjob(
                build_id=build_id,
                subjob_id=i,
                project_type=None,
                job_config=job_config,
                atoms=[Atom('NAME=Leonardo') for _ in range(num_atoms_each)],
            )
            for i in range(count)
        ]
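A hypothetical test that could sit next to the helper above (the subjob_id() accessor is an assumption about the Subjob API, not taken from this page):

    def test_create_subjobs_assigns_sequential_ids(self):
        subjobs = self._create_subjobs(count=5)
        self.assertEqual([subjob.subjob_id() for subjob in subjobs], [0, 1, 2, 3, 4])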
Example #7
    def atomize_in_project(self, project_type):
        """
        Translate the atomizer dicts that this instance was initialized with into a list of actual atom commands. This
        executes atomizer commands inside the given project in order to generate the atoms.

        :param project_type: The ProjectType instance in which to execute the atomizer commands
        :type project_type: ProjectType
        :return: The list of environment variable "export" atom commands
        :rtype: list[app.master.atom.Atom]
        """
        atoms_list = []
        for atomizer_dict in self._atomizer_dicts:
            for atomizer_var_name, atomizer_command in atomizer_dict.items():
                atomizer_output, exit_code = project_type.execute_command_in_project(atomizer_command)
                if exit_code != 0:
                    self._logger.error('Atomizer command "{}" for variable "{}" failed with exit code: {} and output:'
                                       '\n{}', atomizer_command, atomizer_var_name, exit_code, atomizer_output)
                    raise AtomizerError('Atomizer command failed!')

                new_atoms = [Atom(atomizer_var_name, atom_value)
                             for atom_value in atomizer_output.strip().splitlines()]
                atoms_list.extend(new_atoms)

        return atoms_list
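A side-by-side of how the two atomize_in_project variants on this page construct their atoms (a reading of the snippets themselves; the Atom signatures are inferred from the calls shown here):

# Earlier variant (Example #5): a single pre-rendered "export ..." command string,
# with the project directory already rewritten to $PROJECT_DIR.
atom_a = Atom(get_environment_variable_setter_command('TESTPATH', '$PROJECT_DIR/tests/test_foo.py'))
# This variant (Example #7): the variable name and raw value are stored separately,
# leaving command rendering and path substitution to downstream consumers.
atom_b = Atom('TESTPATH', '/tmp/project/tests/test_foo.py')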
Example #8
    def _mock_atoms(self, command_strings):
        atom_spec = Atom('key', 'val')
        return [
            Mock(spec_set=atom_spec, command_string=cmd)
            for cmd in command_strings
        ]
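A hypothetical call site for the helper above (not from the source), inside some test method of the same class:

        mock_atoms = self._mock_atoms(['export BREAKFAST="pancakes";', 'export BREAKFAST="cereal";'])
        self.assertEqual(mock_atoms[0].command_string, 'export BREAKFAST="pancakes";')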
Example #9
    def load_from_db(cls, build_id):
        """
        Given a build_id, fetch all the stored information from the database to reconstruct
        a Build object to represent that build.
        :param build_id: The id of the build to recreate.
        """
        with Connection.get() as session:
            build_schema = session.query(BuildSchema).filter(BuildSchema.build_id == build_id).first()
            failed_artifact_directories_schema = session.query(FailedArtifactDirectoriesSchema) \
                .filter(FailedArtifactDirectoriesSchema.build_id == build_id) \
                .all()
            failed_subjob_atom_pairs_schema = session.query(FailedSubjobAtomPairsSchema) \
                .filter(FailedSubjobAtomPairsSchema.build_id == build_id) \
                .all()
            atoms_schema = session.query(AtomsSchema).filter(AtomsSchema.build_id == build_id).all()
            subjobs_schema = session.query(SubjobsSchema).filter(SubjobsSchema.build_id == build_id).all()

            # If the build query returned no row, the build doesn't exist in the database
            if not build_schema:
                return None

            build_parameters = json.loads(build_schema.build_parameters)

            # Generate a BuildRequest object with our query response
            build_request = BuildRequest(build_parameters)

            # Create the initial Build object; we will alter its state as we read in more data
            build = Build(build_request)
            build._build_id = build_id

            # Manually generate the ProjectType object for the build and create a `job_config`, since this is usually done in `prepare()`
            build.generate_project_type()
            job_config = build.project_type.job_config()

            # Manually update build data
            build._artifacts_tar_file = build_schema.artifacts_tar_file
            build._artifacts_zip_file = build_schema.artifacts_zip_file
            build._error_message = build_schema.error_message
            build._postbuild_tasks_are_finished = bool(int(build_schema.postbuild_tasks_are_finished))
            build.setup_failures = build_schema.setup_failures
            build._timing_file_path = build_schema.timing_file_path

            # Manually set the state machine timestamps
            build._state_machine._transition_timestamps = {
                BuildState.QUEUED: build_schema.queued_ts,
                BuildState.FINISHED: build_schema.finished_ts,
                BuildState.PREPARED: build_schema.prepared_ts,
                BuildState.PREPARING: build_schema.preparing_ts,
                BuildState.ERROR: build_schema.error_ts,
                BuildState.CANCELED: build_schema.canceled_ts,
                BuildState.BUILDING: build_schema.building_ts
            }
            build._state_machine._fsm.current = BuildState[build_schema.state]

            build_artifact = BuildArtifact(build_schema.build_artifact_dir)

            directories = []
            for directory in failed_artifact_directories_schema:
                directories.append(directory.failed_artifact_directory)
            build_artifact._failed_artifact_directories = directories

            pairs = []
            for pair in failed_subjob_atom_pairs_schema:
                pairs.append((pair.subjob_id, pair.atom_id))
            build_artifact._q_failed_subjob_atom_pairs = pairs

            build._build_artifact = build_artifact

            atoms_by_subjob_id = {}
            for atom in atoms_schema:
                atoms_by_subjob_id.setdefault(atom.subjob_id, [])
                atoms_by_subjob_id[atom.subjob_id].append(Atom(
                    atom.command_string,
                    atom.expected_time,
                    atom.actual_time,
                    atom.exit_code,
                    atom.state,
                    atom.atom_id,
                    atom.subjob_id
                ))

            subjobs = OrderedDict()
            for subjob in subjobs_schema:
                atoms = atoms_by_subjob_id[subjob.subjob_id]
                # Add atoms after subjob is created so we don't alter their state on initialization
                subjob_to_add = Subjob(build_id, subjob.subjob_id, build.project_type, job_config, [])
                subjob_to_add._atoms = atoms
                subjob_to_add.completed = subjob.completed
                subjobs[subjob.subjob_id] = subjob_to_add
            build._all_subjobs_by_id = subjobs

            # Place subjobs into correct queues within the build
            build._unstarted_subjobs = Queue(maxsize=len(subjobs))
            build._finished_subjobs = Queue(maxsize=len(subjobs))
            for _, subjob in subjobs.items():
                if subjob.completed:
                    build._finished_subjobs.put(subjob)
                else:
                    build._unstarted_subjobs.put(subjob)

            return build
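A hedged usage sketch (not from the source): load_from_db takes `cls`, so it reads as a classmethod on Build; a caller would reconstruct a persisted build by id and handle the not-found case explicitly.

build = Build.load_from_db(build_id=123)
if build is None:
    # No row for this build id was found in the database.
    handle_missing_build(123)  # hypothetical caller-side handling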