Example #1
    def __CreateInitialSubDirs(self):
        """Creates all necessary directories."""
        # Nothing to create if no out dirs are requested and log output is disabled.
        if not Flags.ARGS.out_dirs and Flags.ARGS.nolog_output: return

        # Create the user-friendly link to the pipeline dir if it doesn't already exist.
        FileUtils.CreateLink(FileUtils.GetPipelineLinkDir(),
                             FileUtils.GetPipelineDir())

        # Create the output directory.
        if Flags.ARGS.out_dirs or not Flags.ARGS.log_to_tmp:
            self._pipeline_output_dir = os.path.join(
                FileUtils.GetPipelineDir(), self._id)
            FileUtils.MakeDirs(self._pipeline_output_dir)

        # Create the log dir if required.
        if not Flags.ARGS.nolog_output:
            if Flags.ARGS.log_to_tmp:
                log_root = os.path.join('/tmp', 'pipeline', self.pipeline_id())
            else:
                log_root = self._pipeline_output_dir
            self._pipeline_log_dir = os.path.join(log_root, 'log',
                                                  self.pipeline_date())
            FileUtils.MakeDirs(self._pipeline_log_dir)

        # Create all the subdirs.
        self._subdirs = {}
        for i in Flags.ARGS.out_dirs:
            subdir = os.path.join(self.pipeline_output_dir(), i)
            FileUtils.MakeDirs(subdir)
            self._subdirs['PIPELINE_' + i.upper() + '_DIR'] = subdir
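
The mapping built in the final loop is the part callers consume. Below is a minimal, self-contained sketch of that naming convention, with plain os.makedirs standing in for the FileUtils and Flags helpers, which are not shown here (create_subdirs is a hypothetical name, not part of the original class):

import os

def create_subdirs(output_dir, out_dirs):
    """Sketch: create each requested subdir under output_dir and return the
    PIPELINE_<NAME>_DIR -> path mapping built in the method above."""
    subdirs = {}
    for name in out_dirs:
        subdir = os.path.join(output_dir, name)
        os.makedirs(subdir, exist_ok=True)  # stand-in for FileUtils.MakeDirs
        subdirs['PIPELINE_' + name.upper() + '_DIR'] = subdir
    return subdirs

# create_subdirs('/tmp/pipeline/run1', ['data', 'bin'])
# -> {'PIPELINE_DATA_DIR': '/tmp/pipeline/run1/data',
#     'PIPELINE_BIN_DIR': '/tmp/pipeline/run1/bin'}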
Example #2
  @classmethod
  def WorkHorse(cls, tasks):
    """Runs the workhorse for the command.

    Args:
      tasks: OrderedDict {int, set(string)}: Dict from priority to the set of
          tasks to execute at that priority. Note: the dict is ordered by
          priority.

    Returns:
      (list, list): A tuple of lists in the form
          (successful_tasks, failed_tasks) specifying the tasks that succeeded
          and the ones that failed.
    """
    all_tasks = []
    dirs_to_import = {}
    dir_to_task_map = {}
    for set_tasks in tasks.values():
      for task in set_tasks:
        all_tasks += [task]
        out_dir = PipelineUtils.GetOutDirForTask(task)
        publish_dir = PipelineUtils.GetPublishCurrentDirForTask(task)
        if not out_dir or not publish_dir: continue
        dirs_to_import[publish_dir] = out_dir
        dir_to_task_map[publish_dir] = dir_to_task_map.get(publish_dir, []) + [task]

    # Check if there are any directories to publish.
    if not dirs_to_import:
      TermColor.Error('Did not find any dirs to import. Do not forget to specify publish root '
                      'using --publish_root')
      return ([], all_tasks)

    # Create all the target dirs to import to.
    for out_dir in dirs_to_import.values():
      FileUtils.MakeDirs(out_dir)

    # Run all the copy tasks.
    successful_dirs = []; failed_dirs = []
    args = zip(itertools.repeat(cls), itertools.repeat('_RunSingleTask'),
               list(dirs_to_import), list(dirs_to_import.values()))
    dir_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
    if not dir_res:
      TermColor.Error('Could not process: %s' % all_tasks)
      return ([], all_tasks)

    for (res, publish_dir) in dir_res:
      if res == Importer.EXITCODE['SUCCESS']:
        successful_dirs += [publish_dir]
      elif res == Importer.EXITCODE['FAILURE']:
        failed_dirs += [publish_dir]
      else:
        TermColor.Fatal('Invalid return code %d for %s' % (res, publish_dir))

    # Map the processed dirs back to their tasks.
    successful_tasks = []; failed_tasks = []
    for i in successful_dirs:
      successful_tasks += dir_to_task_map.get(i, [])

    for i in failed_dirs:
      failed_tasks += dir_to_task_map.get(i, [])

    return (successful_tasks, failed_tasks)
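
ExecUtils.ExecuteParallel is not shown here; from the call site it appears to take (object, method_name, arg1, arg2) tuples and yield (exit_code, publish_dir) pairs. Below is a minimal sketch of that fan-out/collect shape using concurrent.futures (execute_parallel and copy_fn are hypothetical stand-ins, not the real API):

import concurrent.futures

def execute_parallel(copy_fn, dirs_to_import, pool_size):
    """Sketch: run copy_fn(publish_dir, out_dir) for every pair in
    dirs_to_import and return (result, publish_dir) tuples, mirroring
    the dir_res list consumed above."""
    with concurrent.futures.ThreadPoolExecutor(max_workers=pool_size) as pool:
        results = pool.map(copy_fn, dirs_to_import.keys(),
                           dirs_to_import.values())
        return list(zip(results, dirs_to_import.keys()))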
Example #3
    def CreateAllSubDirsForPath(self, path):
        """Creates all the subdirs for the given path.

        Args:
          path: string: The path for which the subdirs need to be created.

        Returns:
          dict {string, string}: The dictionary of SUBDIR IDS to actual paths.
        """
        subdirs = self.GetAllSubDirsForPath(path)
        for subdir in subdirs.values():
            FileUtils.MakeDirs(subdir)
        return subdirs
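
GetAllSubDirsForPath is not shown here. For illustration, a self-contained sketch of the same materialize-and-return flow, assuming it yields a {SUBDIR_ID: path} mapping (create_all_subdirs and subdir_map are hypothetical names):

import os

def create_all_subdirs(subdir_map):
    """Sketch: materialize every path in a {SUBDIR_ID: path} mapping and
    return the mapping, as the method above does."""
    for path in subdir_map.values():
        os.makedirs(path, exist_ok=True)  # stand-in for FileUtils.MakeDirs
    return subdir_map

# create_all_subdirs({'PIPELINE_DATA_DIR': '/tmp/pipeline/run1/data'})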