Example #1
  def MakeRules(cls, rules, makefile):
    """Makes all the rules in the give list.

    Args:
      rules: list: List of rules by type_base to make.
      makefile: string: The *main* makefile name.

    Returns:
      (list, list): A tuple of lists in the form
          (successful_rules, failed_rules) specifying rules for which the make
          rules were successfully generated and rules for which generation
          failed.
    """
    if not rules:
      TermColor.Warning('No rules to build.')
      return ([], rules)

    args = zip(itertools.repeat(cls), itertools.repeat('_WorkHorse'),
               rules, itertools.repeat(makefile))
    rule_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
    successful_rules = []
    failed_rules = []
    for (res, rule) in rule_res:
      if res == 1:
        successful_rules.append(rule)
      elif res == -1:
        failed_rules.append(rule)

    return (successful_rules, failed_rules)
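All of these examples lean on ExecUtils.ExecuteParallel accepting an iterable of
(cls, method_name, *call_args) tuples and yielding (result, first_arg) pairs.
That API is inferred from the call sites, not shown in the source; a minimal
sketch of a compatible helper, with every name here an assumption:

import multiprocessing

def _dispatch(packed):
  """Unpacks (cls, method_name, *call_args) and invokes the bound method."""
  cls, method_name = packed[0], packed[1]
  call_args = packed[2:]
  res = getattr(cls, method_name)(*call_args)
  # Pair the result with the primary argument (e.g. the rule) so callers can
  # partition their inputs by outcome.
  return (res, call_args[0])

def ExecuteParallel(args, pool_size):
  """Runs each packed call in a worker pool; returns [(result, arg), ...]."""
  with multiprocessing.Pool(pool_size) as pool:
    return pool.map(_dispatch, list(args))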
Example #2
  def WorkHorse(cls, rules):
    """Runs the workhorse for the command.

    Args:
      rules: list: List of rules to be handled.

    Returns:
      (list, list): A tuple of lists in the form
          (successful_rules, failed_rules) specifying rules that succeeded and
          ones that failed.
    """
    (successful_build, failed_build) = Builder.WorkHorse(rules)

    # All our binaries assume they will be run from the source root.
    os.chdir(FileUtils.GetSrcRoot())

    pipe_output = len(successful_build) > 1
    args = zip(itertools.repeat(cls), itertools.repeat('_RunSingeRule'),
               successful_build, itertools.repeat(pipe_output))
    rule_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
    successful_run = []
    failed_run = []
    for (res, rule) in rule_res:
      if res == 1:
        successful_run.append(rule)
      elif res == -1:
        failed_run.append(rule)

    return (successful_run, failed_build + failed_run)
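pipe_output is set whenever more than one binary will run concurrently,
presumably so each process's output is captured rather than interleaved on the
terminal. The _RunSingeRule method itself is not shown; a hypothetical sketch
of one that honors the flag, assuming it shells out via subprocess:

import subprocess

def _RunSingeRule(rule, pipe_output):
  # Capture output when several rules run in parallel so their logs do not
  # interleave; stream straight to the terminal otherwise.
  if pipe_output:
    proc = subprocess.run([rule], stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
  else:
    proc = subprocess.run([rule])
  # Mirror the 1 / -1 convention the caller partitions on.
  return 1 if proc.returncode == 0 else -1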
Example #3
    def WorkHorse(cls, rules):
        """Runs the workhorse for the command.

        Args:
            rules: list: List of rules to be handled.

        Returns:
            (list, list): A tuple of lists in the form
                (successful_rules, failed_rules) specifying rules that
                succeeded and ones that failed.
        """
        (successful_expand,
         failed_expand) = Rules.GetExpandedRules(rules,
                                                 Flags.ARGS.allowed_rule_types)

        args = zip(itertools.repeat(cls), itertools.repeat('_RunSingeRule'),
                   successful_expand)
        rule_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
        successful_deps = []
        failed_deps = []
        for (res, rule) in rule_res:
            if res == 1:
                successful_deps.append(rule)
            elif res == -1:
                failed_deps.append(rule)

        return (successful_deps, failed_expand + failed_deps)
Example #4
  def WorkHorse(cls, tasks):
    """Runs the workhorse for the command.

    Args:
      tasks: OrderedDict {int: set(string)}: Dict from priority to the set of
          tasks to execute at that priority. Note: the dict is ordered by
          priority.

    Returns:
      (list, list): A tuple of lists in the form
          (successful_tasks, failed_tasks) specifying tasks that succeeded and
          ones that failed.
    """
    all_tasks = []
    dirs_to_import = {}
    dir_to_task_map = {}
    for set_tasks in tasks.values():
      for task in set_tasks:
        all_tasks.append(task)
        out_dir = PipelineUtils.GetOutDirForTask(task)
        publish_dir = PipelineUtils.GetPublishCurrentDirForTask(task)
        if not out_dir or not publish_dir:
          continue
        dirs_to_import[publish_dir] = out_dir
        # Map each publish dir back to the tasks that produced it so results
        # can be translated from dirs to tasks at the end.
        dir_to_task_map[publish_dir] = (dir_to_task_map.get(publish_dir, []) + [task])

    # Check if there are any directories to publish.
    if not dirs_to_import:
      TermColor.Error('Did not find any dirs to import. Do not forget to specify publish root '
                      'using --publish_root')
      return ([], all_tasks)

    # Create all the target dirs to import to.
    for import_dir in dirs_to_import.values():
      FileUtils.MakeDirs(import_dir)

    # Run all the copy tasks.
    successful_dirs = []
    failed_dirs = []
    args = zip(itertools.repeat(cls), itertools.repeat('_RunSingeTask'),
               list(dirs_to_import), list(dirs_to_import.values()))
    dir_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
    if not dir_res:
      TermColor.Error('Could not process: %s' % all_tasks)
      return ([], all_tasks)

    for (res, publish_dir) in dir_res:
      if res == Importer.EXITCODE['SUCCESS']:
        successful_dirs.append(publish_dir)
      elif res == Importer.EXITCODE['FAILURE']:
        failed_dirs.append(publish_dir)
      else:
        TermColor.Fatal('Invalid return code %d for %s' % (res, publish_dir))

    # Get the reverse mapping from dirs to tasks.
    successful_tasks = []
    failed_tasks = []
    for publish_dir in successful_dirs:
      successful_tasks += dir_to_task_map.get(publish_dir, [])

    for publish_dir in failed_dirs:
      failed_tasks += dir_to_task_map.get(publish_dir, [])

    return (successful_tasks, failed_tasks)
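dir_to_task_map exists purely so directory-level results can be mapped back to
the tasks that published into each directory. A tiny worked example with
hypothetical task names:

dir_to_task_map = {}
for task, publish_dir in [('ingest', '/pub/daily'), ('report', '/pub/daily')]:
  dir_to_task_map[publish_dir] = dir_to_task_map.get(publish_dir, []) + [task]

# If the single import of '/pub/daily' succeeds, both tasks succeed with it.
assert dir_to_task_map['/pub/daily'] == ['ingest', 'report']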
Example #5
  def WorkHorse(cls, tasks):
    """Runs the workhorse for the command.

    Args:
      tasks: OrderedDict {int: set(string)}: Dict from priority to the set of
          tasks to execute at that priority. Note: the dict is ordered by
          priority.

    Returns:
      (list, list): A tuple of lists in the form
          (successful_tasks, failed_tasks) specifying tasks that succeeded and
          ones that failed.
    """
    start = time.time()

    # All our binaries assume they will be run from the source root.
    os.chdir(FileUtils.GetSrcRoot())
    cls._CreateDirsForTasks(tasks)

    successful_run = []
    failed_run = []
    aborted_task = None

    # NOTE(stephen): Storing task dir status and task out dir status separately since
    # pipelines do not always have an out dir defined.
    dirs_status = {}
    out_dirs_status = {}
    for set_tasks in tasks.values():
      if aborted_task:
        failed_run += set_tasks
        continue

      tasks_to_run = []
      for task in set_tasks:
        task_options = cls.__GetTaskOptions(task)
        # Check if this task requires all previous tasks in the same directory to be
        # successful.
        if task_options[Runner.TASK_OPTIONS['REQUIRE_DIR_SUCCESS']]:
          task_dir = PipelineUtils.TaskDirName(task)
          cur_dir_status = dirs_status.get(task_dir)
          # If any previous tasks have been run in this directory, check to ensure all
          # of them were successful.
          if cur_dir_status and cur_dir_status != Runner.EXITCODE['SUCCESS']:
            failed_run += [task]
            task_display_name = PipelineUtils.TaskDisplayName(task)
            TermColor.Info('Skipped   %s' % task_display_name)
            TermColor.Failure(
              'Skipped Task: %s due to earlier failures in task dir' % task_display_name
            )
            continue

        tasks_to_run.append(task)

      # It is possible for all steps at this priority level to be skipped due to the
      # task options selected.
      if set_tasks and not tasks_to_run:
        continue

      # Run all the tasks at the same priority in parallel.
      args = zip(itertools.repeat(cls), itertools.repeat('_RunSingeTask'),
                 tasks_to_run)
      task_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
      if not task_res:
        TermColor.Error('Could not process: %s' % tasks_to_run)
        failed_run += tasks_to_run
        continue
      for (res, task) in task_res:
        if res == Runner.EXITCODE['SUCCESS']:
          successful_run.append(task)
        elif res in (Runner.EXITCODE['FAILURE'], Runner.EXITCODE['ALLOW_FAIL']):
          failed_run.append(task)
        elif res == Runner.EXITCODE['ABORT_FAIL']:
          failed_run.append(task)
          aborted_task = task
        else:
          TermColor.Fatal('Invalid return code %d for %s' % (res, task))

        # Update the current status of all tasks in the same directory.
        task_dir = PipelineUtils.TaskDirName(task)
        dirs_status[task_dir] = max(
          dirs_status.get(task_dir, Runner.EXITCODE['_LOWEST']), res,
        )

        # Update the out dir status.
        out_dir = PipelineUtils.GetOutDirForTask(task)
        if out_dir:
          out_dirs_status[out_dir] = max(
            out_dirs_status.get(out_dir, Runner.EXITCODE['_LOWEST']), res,
          )

    # Write the status files to the dirs.
    cls._WriteOutDirsStatus(out_dirs_status)

    # Send the final status mail.
    time_taken = time.time() - start
    cls._SendFinalStatusMail(successful_run, failed_run, aborted_task, time_taken)

    if aborted_task:
      TermColor.Failure('Aborted by task: %s' % aborted_task)

    return (successful_run, failed_run)
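The max() updates to dirs_status and out_dirs_status only work if
Runner.EXITCODE orders outcomes by severity, with worse results comparing
higher. The real values are not shown here; under that assumed ordering, one
failure permanently marks a directory, so later REQUIRE_DIR_SUCCESS tasks in it
are skipped:

# Assumed severity ordering; only the relative order matters to max().
EXITCODE = {'_LOWEST': 0, 'SUCCESS': 1, 'ALLOW_FAIL': 2, 'FAILURE': 3,
            'ABORT_FAIL': 4}

dirs_status = {}
for res in (EXITCODE['SUCCESS'], EXITCODE['FAILURE'], EXITCODE['SUCCESS']):
  dirs_status['etl'] = max(dirs_status.get('etl', EXITCODE['_LOWEST']), res)

# The directory stays failed despite the later success.
assert dirs_status['etl'] == EXITCODE['FAILURE']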