Example #1
  def ValidateRule(cls, name, rule_type, args):
    """Validates the rules.

    Args:
      name: string: The name of the rule.
      rule_type: string: The rule_type of the rule.
      args: dict: The arguments passed to the rule function.

    Exceptions:
      RulesParseError: Raises exception if validation fails.
    """
    # Check name.
    if not name or name.find('.') != -1 or rule_type not in cls.PARSED_RULE_TYPES:
      err_str = 'Invalid target [%s] of rule_type [%s].' % (name, rule_type)
      TermColor.Error(err_str)
      raise RulesParseError(err_str)

    # Get the expanded names for all src, hdr, dep args.
    for field in ['src', 'hdr', 'dep', 'main', 'prebuild', 'flag', 'link']:
      field_data = args.get(field, [])
      if not field_data: continue
      if not isinstance(field_data, list):
        err_str = ('Invalid target: [%s]. field [%s] must be of <type \'list\'>, not %s' %
                   (name, field, type(field_data)))
        TermColor.Error(err_str)
        raise RulesParseError(err_str)
Example #2
  def LoadRule(cls, rule):
    """Loads the rule.

    Args:
      rule: string: The rule that needs to be loaded.

    Return:
      boolean: True if rule is already present or successfully loaded and false
          otherwise.
    """
    # Check if the rule is loaded.
    if cls.GetRule(rule): return True

    (dirname, targetname) = os.path.split(rule)
    rules_file = os.path.join(dirname, 'RULES')
    if not dirname or not os.path.isfile(rules_file):
      TermColor.Error('No rules file %s for target %s ' % (
          rules_file, Utils.RuleDisplayName(rule)))
      return False

    try:
      Rules.LoadRules(dirname)
      return True
    except Exception as e:
      if type(e) == KeyboardInterrupt: raise e
      TermColor.PrintException('Could not load %s. ' % Utils.RuleDisplayName(rule))
      return False
Example #3
  def LoadRules(cls, dirname):
    """Load RULES file from the given directory.

    Args:
      dirname: string: The dirname for which the Rules file needs to be loaded.

    Exceptions:
      RulesParseError: Raises exception if parsing fails.
    """
    # check if this directory has already been loaded
    rules_file = os.path.join(dirname, 'RULES')
    if not os.path.isfile(rules_file):
      TermColor.Error('Cannot find file: %s' % rules_file)
      return

    if rules_file in cls.loaded:
      return

    with cls.LOAD_LOCK:
      cls.loaded |= set([rules_file])
      # Save basedir for restoration later.
      oldbasedir = cls.basedir
      cls.basedir = dirname
      TermColor.VInfo(5, 'Reading %s' % rules_file)
      with open(rules_file) as f:
        exec(compile(f.read(), rules_file, 'exec'))
      cls.basedir = oldbasedir
Example #4
    def WorkHorse(cls, tasks):
        """Runs the workhorse for the command.

    Args:
      tasks: OrderedDict {int, set(string)}: Dict from priority to set of tasks to execute at the
          priority. Note: the dict is ordered by priority.


    Return:
      (list, list): Returns a tuple of list in the form
          (successful_tasks, failed_tasks) specifying tasks that succeeded and
          ones that failed.
    """
        success_tasks = []
        paths_to_clean = set()
        for set_tasks in tasks.values():
            for task in set_tasks:
                paths = PipelineConfig.Instance().GetAllSubDirsForPath(
                    PipelineUtils.GetTaskOutputRelativeDir(task))
                paths_to_clean |= set(paths.values())
                success_tasks += [task]

        TermColor.VInfo(1, 'Cleaning %d' % len(paths_to_clean))
        for i in paths_to_clean:
            TermColor.VInfo(3, 'Cleaning %s' % i)
            shutil.rmtree(i, True)

        return (success_tasks, [])
Example #5
  def GetFilesInDir(cls, dir, recurse=True, ignore_list=[]):
    """Given a directory, returns all the files in it and sub directories.

    Args:
      dir: string: The directory to walk.
      recurse: boolean: If we should recurse the directory tree.
      ignore_list: list: List of strings to ignore.

    Return:
      list: List of files.
    """
    out_files = []
    if not os.path.isdir(dir):
      TermColor.Warning('Not a directory: %s' % dir)
      return out_files

    for (root, subdirs, files) in os.walk(dir):
      ignore = cls.IgnorePath(root, ignore_list)
      if ignore:
        TermColor.Info('Ignored dirs in %s as anything with [%s] is ignored' % (root, ignore))
        continue
      out_files += [os.path.join(root, x) for x in files]
      # Check if we should continue the walk.
      if not recurse: break

    return out_files
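A short usage note for the helper above: because os.walk yields the top-level directory first, passing recurse=False returns only the files directly under dir. A minimal usage sketch, assuming the method is exposed on the FileUtils class (Example #25 calls FileUtils.GetFilesInDir); the paths and ignore strings are illustrative.

# Hypothetical usage; FileUtils comes from its own module in this repo, and the
# paths/ignore strings are placeholders.
all_files = FileUtils.GetFilesInDir('/tmp/project', recurse=True,
                                    ignore_list=['.git', 'build'])
top_level_only = FileUtils.GetFilesInDir('/tmp/project', recurse=False)
print('%d files total, %d at the top level' % (len(all_files), len(top_level_only)))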
Example #6
    def GetRulesFilesFromSubdirs(cls, dir, ignore_list=[]):
        """Given a directory, returns the rules files from all the subdirectories.
    Args:
      dir: string: The directory to walk.
      ignore_list: list: List of strings to ignore.

    Return:
      list: List of rules files to be run.
    """
        rules = []
        if not os.path.isdir(dir):
            TermColor.Warning('Not a directory: %s' % dir)
            return rules

        for (root, subdirs, files) in os.walk(dir):
            if 'RULES' in files:
                ignore = cls.IgnoreRule(root, ignore_list)
                if ignore:
                    TermColor.Info(
                        'Ignored targets in %s as anything with [%s] is ignored'
                        % (root, ignore))
                    continue
                rules += [cls.RuleNormalizedName(os.path.join(root, 'RULES'))]

        return rules
Example #7
  def WorkHorse(cls, tasks):
    """Runs the workhorse for the command.

    Args:
      tasks: OrderedDict {int, set(string)}: Dict from priority to set of tasks to execute at the
          priority. Note: the dict is ordered by priority.

    Return:
      (list, list): Returns a tuple of list in the form
          (successful_tasks, failed_tasks) specifying tasks that succeeded and
          ones that failed.
    """
    all_tasks = []
    dirs_to_import = {}
    dir_to_task_map = {}
    for set_tasks in tasks.values():
      for task in set_tasks:
        all_tasks += [task]
        out_dir = PipelineUtils.GetOutDirForTask(task)
        publish_dir = PipelineUtils.GetPublishCurrentDirForTask(task)
        if not out_dir or not publish_dir: continue
        dirs_to_import[publish_dir] = out_dir
        dir_to_task_map[publish_dir] = (dir_to_task_map.get(publish_dir, []) + [task])

    # Check if there are any directories to publish.
    if not dirs_to_import:
      TermColor.Error('Did not find any dirs to import. Do not forget to specify publish root '
                      'using --publish_root')
      return ([], all_tasks)

    # Create all the target dirs to import to.
    for dir in dirs_to_import.values():
      FileUtils.MakeDirs(dir)

    # Run all the copy tasks.
    successful_dirs = []; failed_dirs = []
    args = zip(itertools.repeat(cls), itertools.repeat('_RunSingeTask'),
                          list(dirs_to_import), list(dirs_to_import.values()))
    dir_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
    if not dir_res:
      TermColor.Error('Could not process: %s' % all_tasks)
      return ([], all_tasks)

    for (res, dir) in dir_res:
      if res == Importer.EXITCODE['SUCCESS']:
        successful_dirs += [dir]
      elif res == Importer.EXITCODE['FAILURE']:
        failed_dirs += [dir]
      else:
        TermColor.Fatal('Invalid return %d code for %s' % (res, dir))

    # Get the reverse mapping from dirs to tasks.
    successful_tasks = []; failed_tasks = []
    for i in successful_dirs:
      successful_tasks += dir_to_task_map.get(i, [])

    for i in failed_dirs:
      failed_tasks += dir_to_task_map.get(i, [])

    return (successful_tasks, failed_tasks)
Example #8
  def GetExpandedRules(cls, rules, allowed_rule_types=None):
    """Returns the expanded rules corresponding to input rules.
    Args:
      rules: list: List of rules for which the automake is to be generated.
      allowed_rule_types: list: List of allowed rules to use from the RULES
          file. e.g. ['cc_bin', 'cc_test'] will create make rules for all
          'cc_bin' and 'cc_test' rules in the RULES file but not for 'cc_lib'
          rules.

    Return:
      (list, list): Returns a tuple in the form (successful_rules, failed_rules)
          specifying rules that were expanded successfully and ones that failed.
    """
    if not allowed_rule_types:
      allowed_rule_types = cls.PARSED_RULE_TYPES

    successful_rules = []
    failed_rules = []
    for target in rules:
      if not cls.LoadRule(target):
        failed_rules += [target]
        continue

      expanded_targets = []
      (dirname, targetname) = os.path.split(target)
      if targetname == 'RULES':
        expanded_targets = cls.GetRulesForDir(dirname, allowed_rule_types)
        if not expanded_targets:
          TermColor.Warning('No rules found in %s' % target)
          continue
      else:
        expanded_targets = [targetname]

      for item in expanded_targets:
        item_rule = os.path.join(dirname, item)
        rule_data = cls.GetRule(item_rule)
        if not rule_data:
          TermColor.Error('Unable to find a rule for %s' %
                          Utils.RuleDisplayName(item_rule))
          failed_rules += [item_rule]
          continue

        rule_type = rule_data.get('_type', 'invalid')
        if rule_type not in allowed_rule_types:
          TermColor.Error('Rule %s of type %s not allowed ' %
                          (Utils.RuleDisplayName(item_rule), rule_type))
          failed_rules += [item_rule]
          continue

        # All good.
        successful_rules += [item_rule]

    return (successful_rules, failed_rules)
Example #9
  def RunCmd(cmd, timeout_sec=sys.maxsize, piped_output=True, extra_env=None):
    """Executes a command.
    Args:
      cmd: string: A string specifying the command to execute.
      timeout_sec: float: Timeout for the command in seconds.
      piped_output: bool: Set to True to capture (pipe) the command's output;
          set to False to stream it directly to the terminal.
      extra_env: dict{string, string}: The extra environment variables to pass to the cmd.

    Return:
      (int, string): Tuple of (returncode, merged output); the output is ''
          when piped_output is False or the command produced none.
    """
    TermColor.VInfo(2, 'Executing: %s' % cmd)

    try:
      if extra_env:
        cmd_env = os.environ.copy()
        cmd_env.update(extra_env)
      else:
        cmd_env = os.environ

      timer = None
      proc = None

      if piped_output:
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT, env=cmd_env)
      else:
        proc = subprocess.Popen(cmd, shell=True, env=cmd_env)

      # Start timeout.
      timer = Timer(timeout_sec, ExecUtils.__ProcessTimedOut,
                    [proc, cmd, timeout_sec])
      timer.start()
      (merged_out, unused) = proc.communicate()
      timer.cancel()
      retcode = proc.poll()
      if not merged_out:
        merged_out = ''

      if retcode:
        TermColor.Error('%s failed.\nErrorcode: %d' % (cmd, retcode))
        TermColor.Info('%s Output: \n%s' % (cmd, merged_out))
      else:
        TermColor.VInfo(4, '%s Output: \n%s' % (cmd, merged_out))
      return (retcode, merged_out)
    except (KeyboardInterrupt, OSError) as e:
      TermColor.Error('Command: %s failed. Error: %s' % (cmd, e))
      if timer: timer.cancel()
      if proc:
        ExecUtils.__KillSubchildren(proc.pid)
        proc.communicate()
      # Pass on the keyboard interrupt.
      if type(e) == KeyboardInterrupt: raise e
    return (301, '')
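A brief usage sketch for RunCmd, assuming it is exposed on the ExecUtils class as the other examples suggest (Example #12 calls ExecUtils.RunCmd); the command, timeout, and environment variable are illustrative.

# Hypothetical call: run a command with a 30-second timeout and one extra
# environment variable. RunCmd returns (returncode, merged stdout/stderr).
(retcode, output) = ExecUtils.RunCmd('ls -la /tmp', timeout_sec=30,
                                     piped_output=True,
                                     extra_env={'MY_FLAG': '1'})
if retcode:
  TermColor.Error('Command failed with code %d' % retcode)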
Example #10
  def WorkHorse(cls, tasks):
    """Runs the workhorse for the command.

    Args:
      tasks: OrderedDict {int, set(string)}: Dict from priority to set of tasks to execute at the
          priority. Note: the dict is ordered by priority.

    Return:
      (list, list): Returns a tuple of list in the form
          (successful_tasks, failed_tasks) specifying tasks that succeeded and
          ones that failed.
    """
    all_tasks = []
    dirs_to_publish = set()
    publish_dir_to_task_map = {}
    for set_tasks in tasks.values():
      for task in set_tasks:
        all_tasks += [task]
        publish_dir = PipelineUtils.GetPublishDirForTask(task)
        if not publish_dir: continue
        dirs_to_publish |= set([publish_dir])
        publish_dir_to_task_map[publish_dir] = (publish_dir_to_task_map.get(publish_dir, []) +
                                                [task])

    # Check if there are any directories to publish.
    if not dirs_to_publish:
      TermColor.Error('Did not find any dirs to publish. Do not forget to specify publish root '
                      'using --publish_root')
      return ([], all_tasks)

    # Run all the copy tasks.
    successful_dirs = []; failed_dirs = []
    for dir in dirs_to_publish:
      publish_dir = cls._GetActualPublishDir(dir)
      if not publish_dir:
        failed_dirs += [publish_dir]
        continue
      (parent, name) = os.path.split(publish_dir)
      TermColor.Info('Making current: %s' % publish_dir)
      with FileUtils.PushDir(parent):
        FileUtils.CreateLink('current', name)
      successful_dirs += [publish_dir]

    # Get the reverse mapping from dirs to tasks.
    successful_tasks = []; failed_tasks = []
    for i in successful_dirs:
      successful_tasks += publish_dir_to_task_map.get(i, [])

    for i in failed_dirs:
      failed_tasks += publish_dir_to_task_map.get(i, [])

    return (successful_tasks, failed_tasks)
Example #11
    def _ComputeRules(cls, targets, ignore_list=[]):
        """Computes the rules to be run given the input targets.
    Args:
      targets: list: List of input targets.
      ignore_list: list: List of strings to ignore.
    Return:
      list: List of actual rules to be run.
    """
        rules = []
        for target in targets:
            ignore = Utils.IgnoreRule(target, ignore_list)
            if ignore:
                TermColor.Warning(
                    'Ignored target %s as anything with [%s] is ignored.' %
                    (target, ignore))
                continue

            if os.path.isdir(target):
                target = os.getcwd() if target == '.' else target
                rule = os.path.join(target, 'RULES')
                if os.path.isfile(rule):
                    rules += [Utils.RuleNormalizedName(rule)]
                else:
                    TermColor.Warning('No RULES file in directory: %s' %
                                      target)
            elif os.path.isfile(target):
                rules += [
                    Utils.RuleNormalizedName(os.path.splitext(target)[0])
                ]
            elif os.path.basename(target) == '...':
                dir = os.path.dirname(target)
                if not dir: dir = os.getcwd()
                dir = os.path.dirname(
                    Utils.RuleNormalizedName(os.path.join(dir, 'RULES')))
                rules += Utils.GetRulesFilesFromSubdirs(dir, ignore_list)
            else:
                rules += [Utils.RuleNormalizedName(target)]

        temp_list = []
        seen = set()
        for rule in rules:
            if rule in seen: continue
            temp_list += [rule]
            seen |= set([rule])

        rules = []
        for rule in temp_list:
            if ((os.path.basename(rule) != 'RULES') and
                (os.path.join(os.path.dirname(rule), 'RULES') in seen)):
                continue
            rules += [rule]

        return rules
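The two loops at the end implement an order-preserving de-duplication, then drop any individual rule whose whole RULES file is already in the list. A standalone sketch of just that filtering, with made-up rule paths:

import os

# Hypothetical input; 'a/foo' is dropped because 'a/RULES' already covers it.
rules = ['a/RULES', 'a/foo', 'b/bar', 'b/bar', 'c/RULES']

seen = set()
deduped = []
for rule in rules:
  if rule in seen:
    continue
  deduped.append(rule)
  seen.add(rule)

filtered = [r for r in deduped
            if os.path.basename(r) == 'RULES'
            or os.path.join(os.path.dirname(r), 'RULES') not in seen]
print(filtered)  # ['a/RULES', 'b/bar', 'c/RULES']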
Example #12
  def update_submodules(cls):
    """Does a git pull and then update the submodules to the latest version
    AND finally ensure the submodule is on master
    @warning if you run this from a module run that does a os.chdir, this
       os.chdir will NOT persist here
    """
    if ExecUtils.RunCmd('git pull')[0]:
      raise Error(TermColor.ColorStr(
        'unable to git pull as part of submodule update', 'RED'))

    if ExecUtils.RunCmd('git submodule init && git submodule update')[0]:
      raise Error(TermColor.ColorStr(
        'git submodule update failed!', 'RED'))
Example #13
 def commit_push(cls, files, msg):
   """Commits to the current branch AND pushes to remote
   Args:
     files (list) - list of files to commit
     msg (string) - the commit message
   """
   ret = ExecUtils.RunCmd('git commit %s -m "%s"' % (' '.join(files), msg))[0]
   if not ret == 0:
     raise Error(TermColor.ColorStr(
       'error committing these files: %s' % ' '.join(files), 'RED'))
   ret = ExecUtils.RunCmd('git pull && git push')[0]
   if not ret == 0:
     raise Error(TermColor.ColorStr(
       'Please manually resolve any conflicts preventing git push of ' + \
       'the commit to remote', 'RED'))
Example #14
  def MakeRules(cls, rules, makefile):
    """Makes all the rules in the give list.

    Args:
      rules: list: List of rules by type_base to make.
      makefile: string: The *main* makefile name.

    Return:
      (list, list): Returns a tuple of list in the form
          (successful_rules, failed_rules) specifying rules for which the make
           rules were successfully generated and for which it failed.
    """
    if not rules:
      TermColor.Warning('No rules to build.')
      return ([], rules)

    args = zip(itertools.repeat(cls), itertools.repeat('_WorkHorse'),
                          rules, itertools.repeat(makefile))
    rule_res = ExecUtils.ExecuteParallel(args, Flags.ARGS.pool_size)
    successful_rules = []; failed_rules = []
    for (res, rule) in rule_res:
      if res == 1:
        successful_rules += [rule]
      elif res == -1:
        failed_rules += [rule]

    return (successful_rules, failed_rules)
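The zip/itertools.repeat construction above builds one argument tuple per rule of the form (class, method name, rule, makefile). A minimal standalone sketch of that pattern, assuming the parallel executor ends up calling getattr(class, method) on the remaining tuple elements; the executor's exact contract is not shown in these examples and the class below is a stand-in.

import itertools

class FakeMaker(object):
  @classmethod
  def _WorkHorse(cls, rule, makefile):
    # Stand-in for the real per-rule make step; returns (status, rule).
    return (1, rule)

rules = ['lib/foo', 'lib/bar']
makefile = 'main.mak'
args = list(zip(itertools.repeat(FakeMaker), itertools.repeat('_WorkHorse'),
                rules, itertools.repeat(makefile)))
# Each tuple: (FakeMaker, '_WorkHorse', 'lib/foo', 'main.mak'), and so on.
for (klass, method, rule, mk) in args:
  print(getattr(klass, method)(rule, mk))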
Example #15
    def send_message_from_files(self,
                                sender='',
                                receivers=[],
                                subject='Automated Mail',
                                filenames=[],
                                body=''):
        """Sends a message from files.

    Args:
      sender: string: The email id of the sender. Default = username@machinename
      receivers: list[string]: The email ids of receivers.
      subject: string: The subject for the mail.
      filenames: list[string]: The list of files whose contents are appended to the mail.
      body: string: The body for the mail before the files are appended.

    Return:
      boolean: True if the mail was sent and false otherwise.
    """
        outer = self.PrepareMultipartMessage(sender, receivers, subject)
        outer.attach(MIMEText(body))
        for filename in filenames:
            outer.attach(MIMEText('$cat %s\n' % filename))
            try:
                with open(filename, 'r') as fp:
                    outer.attach(MIMEText(fp.read()))
            except Exception as e:  # TODO(pramodg): Make this more restrictive
                err = 'Could not open file: %s. Error: %s : %s' % (filename,
                                                                   type(e), e)
                TermColor.Error(err)
                outer.attach(MIMEText(err))
        return self.send_message(outer)
Example #16
 def get_current_branch(cls):
   """Returns the name of the current branch"""
   cmd = 'git rev-parse --abbrev-ref HEAD'
   r = ExecUtils.RunCmd(cmd)
   if r[0]:
     raise Error(TermColor.ColorStr('error executing cmd %s' % cmd, 'RED'))
   return r[1].strip()
Example #17
    def Run(self):
        self._Init()

        start = time.time()
        try:
            status = Flags.ARGS.func()
        except KeyboardInterrupt as e:
            TermColor.Warning('KeyboardInterrupt')
            status = 1
        duration = 'Took %.2fs' % (time.time() - start)
        if not status:
            TermColor.Success(duration)
        else:
            TermColor.Failure(duration)

        return status
Example #18
  def __GetTimeOutForTask(cls, task):
    """Returns the timeout for the task.

    Args:
      task: string: The task for which the timeout should be prepared.

    Returns:
      int: The timeout in seconds.
    """
    timeout = FileUtils.FileContents(task + '.timeout')
    if not timeout:
      timeout = FileUtils.FileContents(os.path.join(PipelineUtils.TaskDirName(task), 'timeout'))

    if not timeout: return Flags.ARGS.timeout

    timeout = re.sub(r'\s*', '', timeout)
    timeout_parts = re.split(r'(\d+)', timeout)
    if len(timeout_parts) < 3:
      TermColor.Warning('Ignoring invalid timeout [%s] for task: %s' % (timeout, task))
      return Flags.ARGS.timeout

    timeout = float(timeout_parts[1])
    annotation = timeout_parts[2]
    if not annotation: return timeout
    elif annotation == 'd': timeout *= 86400
    elif annotation == 'h': timeout *= 3600
    elif annotation == 'm': timeout *= 60
    elif annotation == 'ms': timeout *= 0.001
    elif annotation == 'us': timeout *= 0.000001
    return timeout
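A self-contained sketch of the suffix handling above, showing what the regex split produces; the helper name and default are hypothetical, but the unit factors (d, h, m, ms, us) mirror the code.

import re

def parse_timeout(text, default=3600):
  # Hypothetical helper mirroring __GetTimeOutForTask's suffix handling.
  text = re.sub(r'\s*', '', text)
  parts = re.split(r'(\d+)', text)  # e.g. '90m' -> ['', '90', 'm']
  if len(parts) < 3:
    return default
  value = float(parts[1])
  scale = {'': 1, 'd': 86400, 'h': 3600, 'm': 60, 'ms': 0.001, 'us': 0.000001}
  return value * scale.get(parts[2], 1)

print(parse_timeout('2h'))     # 7200.0
print(parse_timeout('500ms'))  # 0.5
print(parse_timeout('bogus'))  # falls back to the default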
Example #19
  def GetProtoRuleFormattedData(cls, rule_data, out_type):
    """Get the formatted proto dependency info for the output type.

    Args:
      rule_data: dict: The rule data for the proto rule.
      out_type: string: The type for which the proto data is to be generated.

    Return:
      dict: Corresponding rules generated for the out_type.
    """
    srcs = rule_data.get('src', set())

    protobuf_base_dir = cls.GetProtoBufBaseDir()
    out = {}
    if out_type.find('cc_') == 0:  # Generated cc rule.
      pkg_config_cmd = ('export PKG_CONFIG_PATH=%s; '
          'pkg-config --define-variable=prefix=%s protobuf' %
          (os.path.join(protobuf_base_dir, 'lib/pkgconfig'), protobuf_base_dir))

      out['src'] = set([ cls.__GetOutFileName(x, '.pb.cc') for x in srcs ])
      out['hdr'] = set([ cls.__GetOutFileName(x, '.pb.h') for x in srcs ])
      out['flag'] = set(subprocess.getoutput(pkg_config_cmd + ' --cflags').split())
      out['link'] = set(subprocess.getoutput(pkg_config_cmd + ' --libs').split())
    else:
      TermColor.Error('Unsupported referrer type %s' % out_type)

    return out
Example #20
    def __init__(self):
        """Initialize the singleton instance."""
        self._id = Flags.ARGS.id
        self._pipeline_date = Flags.ARGS.date

        # Set the src root.
        self._pipeline_base_dir = FileUtils.GetAbsPathForFile(Flags.ARGS.root)
        if not os.path.isdir(self._pipeline_base_dir):
            TermColor.Fatal('Invalid Root directory: %s' % Flags.ARGS.root)

        # Set the pipeline specific binary directory, if specified
        self._pipeline_bin_dir = ''
        if Flags.ARGS.bin_root:
            self._pipeline_bin_dir = FileUtils.GetAbsPathForFile(
                Flags.ARGS.bin_root)

        # Set the pipeline utilities directory
        self._pipeline_utils_dir = FileUtils.GetAbsPathForFile(
            Flags.ARGS.utils_root)

        # Create all necessary directories.
        self._pipeline_output_dir = ''
        self._pipeline_log_dir = ''
        self._pipeline_publish_dir = Flags.ARGS.publish_root
        self._subdirs = {}
        self.__CreateInitialSubDirs()
        self.PrintConfig()
Example #21
 def Cleanup(self):
     """Remove the build files."""
     try:
         files = glob.glob(self.__makefile_name.replace('.main.mak', '.*'))
         for file in files:
             os.remove(file)
     except OSError as e:
         TermColor.VInfo(2, 'Could not Cleanup make files. Error: %s' % e)
Example #22
  def Flatten(cls, new_dep, referrer, referrer_data):
    """Given a new dependency, flatten it into existing

    Args:
      new_dep: string: The new dependency which needs to be flattened.
      referrer: string: The referrer for which the new dep is flattened.
      referrer_data: dict: The rule data for the referrer.

    Exceptions:
      RulesParseError: Raises exception if parsing fails.
    """
    TermColor.VInfo(5, '--- Resolving dependency %s' % new_dep)
    (libdir, libname) = os.path.split(new_dep)
    if not libdir:
      err_str = ('Cannot resolve dependency [%s] (referred to by [%s])'
                 % (Utils.RuleDisplayName(new_dep),
                    Utils.RuleDisplayName(referrer)))
      TermColor.Error(err_str)
      raise RulesParseError(err_str)

    # load the corresponding RULES file
    cls.LoadRules(libdir)

    new_dep_data = Rules.GetRule(new_dep)
    if not new_dep_data:
      err_str = 'Unable to find [%s] (referred to by [%s])' % (new_dep, referrer)
      TermColor.Error(err_str)
      raise RulesParseError(err_str)

    referrer_type_base = re.sub('_.*', '', referrer_data.get('_type', 'invalid'))
    new_dep_type = new_dep_data.get('_type', 'invalid')
    if new_dep_type not in cls.FLATTENED_RULE_TYPES.get(referrer_type_base, []):
      err_str = ('Invalid rule [%s] of type [%s] (referred to by [%s])' %
                 (new_dep, new_dep_type, referrer))
      TermColor.Error(err_str)
      raise RulesParseError(err_str)

    # Merge the data.
    cls._MergeDepData(new_dep, new_dep_data, referrer, referrer_data)

    # Flatten recursively.
    for d in new_dep_data.get('dep', set()):
      if d not in referrer_data.get('dep', set()):
        with cls.LOAD_LOCK:
          referrer_data['dep'] |= set([d])
        Rules.Flatten(d, new_dep, referrer_data)
Example #23
def main():
    try:
        DepGraph.Init(Flags.PARSER)
        Flags.InitArgs()
        return DepGraph.Run()
    except KeyboardInterrupt as e:
        TermColor.Warning('KeyboardInterrupt')
        return 1
Example #24
def main():
    try:
        Cleaner.Init(Flags.PARSER)
        Flags.InitArgs()
        return Cleaner.Run()
    except KeyboardInterrupt as e:
        TermColor.Warning('KeyboardInterrupt')
        return 1
Example #25
    def _ComputeTasks(cls, targets, ignore_list=[]):
        """Computes the tasks to be evaluate given the input targets.
    Args:
      targets: list: List of input targets.
      ignore_list: list: List of strings to ignore.

    Return:
      dict{int, set(string)}: Dict from priority to set of tasks to execute at the priority.
    """
        # First create a simple task list of priority string to task.
        # Once all the tasks have been collected, then sort them to create an actual priority order.
        tasks = {}
        # Avoid mutating the shared default argument list.
        ignore_list = ignore_list + ['timeout']
        for target in targets:
            ignore = FileUtils.IgnorePath(target, ignore_list)
            if ignore:
                TermColor.Warning(
                    'Ignored target %s as anything with [%s] is ignored.' %
                    (target, ignore))
                continue

            recurse = False
            if os.path.basename(target) == '...':
                target = os.path.dirname(target)
                if not target:
                    target = FileUtils.GetAbsPathForFile(os.getcwd())
                    if target.find(PipelineConfig.Instance().pipeline_base_dir(
                    )) != 0:
                        target = PipelineConfig.Instance().pipeline_base_dir()
                recurse = True

            abs_target = FileUtils.GetAbsPathForFile(target)
            if not abs_target:
                TermColor.Warning('[%s] is not a valid path' % (target))
                continue

            if os.path.isfile(abs_target):
                cls.__AddFileToTasks(tasks, abs_target)
            elif os.path.isdir(abs_target):
                targets += FileUtils.GetFilesInDir(abs_target, recurse,
                                                   ignore_list)
            else:
                TermColor.Warning('[%s] is not supported' % (abs_target))
                continue

        return cls.__MergeTasks(tasks)
Example #26
 def get_latest_commit(cls):
   """Returns the latest commit hash"""
   commit_hash = subprocess.check_output('git log -1 --pretty=format:%H',
                                         shell=True)
   if not commit_hash:
     raise Error(TermColor.ColorStr(
       'unable to find the latest commit hash', 'RED'))
   return commit_hash
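One caveat worth noting: on Python 3, subprocess.check_output returns bytes, so callers that want the hash as a plain string typically decode it. A minimal sketch, assuming UTF-8 output from git:

import subprocess

raw = subprocess.check_output('git log -1 --pretty=format:%H', shell=True)
commit_hash = raw.decode('utf-8').strip()
print(commit_hash)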
Example #27
    def Run(cls):
        """Runs the command handler.

    Return:
      int: Exit status. 0 means no error.
    """
        rules = cls._ComputeRules(Flags.ARGS.rule, Flags.ARGS.ignore_rules)
        if not rules:
            TermColor.Warning('Could not find any rules.')
            return 101

        (successful_rules, failed_rules) = cls.WorkHorse(rules)
        if successful_rules:
            TermColor.Info('')
            TermColor.Success('No. of Rules: %d' % len(successful_rules))
            TermColor.VInfo(
                1, 'Successful Rules: %s' % json.dumps(
                    Utils.RulesDisplayNames(successful_rules), indent=2))

        if failed_rules:
            TermColor.Info('')
            TermColor.Failure('No. of Rules: %d' % len(failed_rules))
            TermColor.Failure(
                'Rules: %s' %
                json.dumps(Utils.RulesDisplayNames(failed_rules), indent=2))
            return 102

        return 0
Example #28
    def Run(cls):
        """Runs the command handler.

    Return:
      int: Exit status. 0 means no error.
    """
        tasks = cls._ComputeTasks(Flags.ARGS.task, Flags.ARGS.ignore_tasks)
        # TermColor.Info('Tasks: %s' % tasks)
        # TermColor.Info('')
        # for key in tasks.iterkeys():
        #   TermColor.Info('%s: %s' % (key, tasks[key]))

        if not tasks:
            TermColor.Warning('Could not find any tasks.')
            return 101

        (successful_tasks, failed_tasks) = cls.WorkHorse(tasks)
        if successful_tasks:
            TermColor.Info('')
            TermColor.Success('No. of tasks: %d' % len(successful_tasks))
            TermColor.VInfo(
                1, 'Successful tasks: %s' %
                json.dumps(PipelineUtils.TasksDisplayNames(successful_tasks),
                           indent=2))

        if failed_tasks:
            TermColor.Info('')
            TermColor.Failure('No. of tasks: %d' % len(failed_tasks))
            TermColor.Failure('tasks: %s' % json.dumps(
                PipelineUtils.TasksDisplayNames(failed_tasks), indent=2))
            return 102

        return 0
Example #29
 def checkout_branch(cls, branch):
   """Checks out the specified branch with the latest code
   Args:
     branch (string) - the branch name
   """
   # fetches the latest code
   ret = ExecUtils.RunCmd('git fetch origin')[0]
   if not ret == 0:
     raise Error(TermColor.ColorStr('error during git fetch origin!', 'RED'))
   #subprocess.check_call(
   #  'git checkout -b %s --track origin/%s 2>/dev/null' % \
   #  (branch, branch),
   #  shell=True)
   ret = ExecUtils.RunCmd('git checkout -B %s --track origin/%s' % (
     branch, branch))[0]
   if not ret == 0:
     raise Error(TermColor.ColorStr(
       'error checking out branch %s' % branch, 'RED'))
Example #30
 def __ProcessTimedOut(proc, cmd, timeout_sec):
   """Handles timed out process. Kills the process and all its children.
   Args:
     cmd: string: The cmd that launched the proc.
     proc: subprocess.Popen: The proc created for the command.
     timeout_sec: int: The timeout for the process.
   """
   TermColor.Error('Command: %s Timed Out (%dsec)!' % (cmd, timeout_sec))
   ExecUtils.__KillSubchildren(proc.pid)