Пример #1
0
 def testFileRead_Bytes(self):
     """Bytes that are not valid UTF-8 read back as U+FFFD replacements."""
     raw_bytes = b'foo \xe2\x9c bar'
     with gclient_utils.temporary_file() as tmp:
         gclient_utils.FileWrite(tmp, raw_bytes, mode='wb', encoding=None)
         self.assertEqual('foo \ufffd bar', gclient_utils.FileRead(tmp))
Пример #2
0
    def _ensure_initialized(self):
        """Lazily loads the metrics config, filling in any missing fields.

        Reads CONFIG_FILE, determines googler status via a network probe when
        unknown, applies defaults, and rewrites the file when the loaded
        config differed from the completed one.  Idempotent after first call.
        """
        if self._initialized:
            return

        # A missing or malformed config file is treated as an empty config.
        try:
            config = json.loads(gclient_utils.FileRead(CONFIG_FILE))
        except (IOError, ValueError):
            config = {}

        # Keep `config` untouched so we can detect below whether defaults or
        # the googler probe changed anything.
        self._config = config.copy()

        if 'is-googler' not in self._config:
            # /should-upload is only accessible from Google IPs, so we only need to
            # check if we can reach the page. An external developer would get access
            # denied.
            # NOTE(review): urllib.urlopen/URLError/HTTPError looks like a
            # Py2/Py3 compatibility alias for urllib2/urllib.request -- confirm
            # what `urllib` is bound to in this module.
            try:
                req = urllib.urlopen(metrics_utils.APP_URL + '/should-upload')
                self._config['is-googler'] = req.getcode() == 200
            except (urllib.URLError, urllib.HTTPError):
                self._config['is-googler'] = False

        # Make sure the config variables we need are present, and initialize them to
        # safe values otherwise.
        self._config.setdefault('countdown', DEFAULT_COUNTDOWN)
        self._config.setdefault('opt-in', None)
        self._config.setdefault('version', metrics_utils.CURRENT_VERSION)

        if config != self._config:
            print(INVALID_CONFIG_WARNING, file=sys.stderr)
            self._write_config()

        self._initialized = True
Пример #3
0
  def _get_gitcookies(cls):
    """Parses the user's gitcookies file into {domain: (login, secret_token)}.

    Returns an empty dict when the file is missing or unreadable.  Only
    cookie lines whose path field is '/' and whose key field is 'o' are
    kept.  Values starting with 'git-' are split on the first '=' into
    (login, token); otherwise login is '' and the whole value is the token.
    """
    gitcookies = {}
    path = cls.get_gitcookies_path()
    if not os.path.exists(path):
      return gitcookies

    try:
      f = gclient_utils.FileRead(path, 'rb').splitlines()
    except IOError:
      return gitcookies

    for line in f:
      try:
        # Netscape cookie format: 7 tab-separated fields; '#' starts a comment.
        fields = line.strip().split('\t')
        if line.strip().startswith('#') or len(fields) != 7:
          continue
        domain, xpath, key, value = fields[0], fields[2], fields[5], fields[6]
        if xpath == '/' and key == 'o':
          if value.startswith('git-'):
            login, secret_token = value.split('=', 1)
            gitcookies[domain] = (login, secret_token)
          else:
            gitcookies[domain] = ('', value)
      except (IndexError, ValueError, TypeError) as exc:
        # Malformed lines are skipped, but logged for debugging.
        LOGGER.warning(exc)
    return gitcookies
Пример #4
0
 def testFileRead_Unicode(self):
   """FileRead decodes UTF-8 file contents into a text string."""
   payload = b'foo \xe2\x9c\x94 bar'
   with tempfile.NamedTemporaryFile(delete=False) as tmp:
     tmp.write(payload)
     # NamedTemporaryFiles must be closed on Windows before being opened again.
     tmp.close()
     try:
       self.assertEqual('foo ✔ bar', gclient_utils.FileRead(tmp.name))
     finally:
       os.remove(tmp.name)
Пример #5
0
  def Load(changename, local_root, fail_on_not_found, update_status):
    """Gets information about a changelist.

    Args:
      fail_on_not_found: if True, this function will quit the program if the
        changelist doesn't exist.
      update_status: if True, the svn status will be updated for all the files
        and unchanged files will be removed.

    Returns: a ChangeInfo object.
    """
    info_file = GetChangelistInfoFile(changename)
    if not os.path.exists(info_file):
      if fail_on_not_found:
        ErrorExit("Changelist " + changename + " not found.")
      return ChangeInfo(changename, 0, 0, '', None, local_root,
                        needs_upload=False)
    # The file is "<issue info>SEP<file list>SEP<description>".
    split_data = gclient_utils.FileRead(info_file, 'r').split(
        ChangeInfo._SEPARATOR, 2)
    if len(split_data) != 3:
      ErrorExit("Changelist file %s is corrupt" % info_file)
    items = split_data[0].split(', ')
    issue = 0
    patchset = 0
    needs_upload = False
    if items[0]:
      issue = int(items[0])
    if len(items) > 1:
      patchset = int(items[1])
    if len(items) > 2:
      needs_upload = (items[2] == "dirty")
    files = []
    for line in split_data[1].splitlines():
      # Each line is a 7-char status column followed by the filename.
      status = line[:7]
      filename = line[7:]
      files.append((status, filename))
    description = split_data[2]
    save = False
    if update_status:
      # BUGFIX: iterate over a copy (files[:]).  The original iterated the
      # list being mutated, so files.remove() silently skipped the element
      # following each removal.  This also matches the sibling Load()
      # implementation which already iterates files[:].
      for item in files[:]:
        filename = os.path.join(local_root, item[1])
        status_result = SVN.CaptureStatus(filename)
        if not status_result or not status_result[0][0]:
          # File has been reverted.
          save = True
          files.remove(item)
          continue
        status = status_result[0][0]
        if status != item[0]:
          save = True
          files[files.index(item)] = (status, item[1])
    change_info = ChangeInfo(changename, issue, patchset, description, files,
                             local_root, needs_upload)
    if save:
      # Persist the refreshed statuses back to disk.
      change_info.Save()
    return change_info
Пример #6
0
def GetGClientPrimarySolutionName(gclient_root_dir_path):
    """Returns the name of the primary solution in the .gclient file specified."""
    config_path = os.path.join(gclient_root_dir_path, '.gclient')
    config_source = gclient_utils.FileRead(config_path)
    # .gclient is a Python fragment; execute it to extract 'solutions'.
    scope = {}
    exec(config_source, scope)
    for solution in scope.get('solutions', []):
        # The first solution is the primary one.
        return solution.get('name')
    return None
Пример #7
0
  def ReadFile(self, file_item, mode='r'):
    """Reads an arbitrary file.

    Deny reading anything outside the repository.
    """
    # Affected files know their own absolute location.
    if isinstance(file_item, AffectedFile):
      file_item = file_item.AbsoluteLocalPath()
    repo_root = self.change.RepositoryRoot()
    if not file_item.startswith(repo_root):
      raise IOError('Access outside the repository root is denied.')
    return gclient_utils.FileRead(file_item, mode)
Пример #8
0
def read_tree(tree_root):
  """Returns a dict of all the files in a tree.

  Maps each file's path relative to tree_root (with '/' separators) to its
  contents.  Hidden files and hidden directories are skipped.
  """
  tree = {}
  for root, dirs, files in os.walk(tree_root):
    # Prune hidden directories in place so os.walk does not descend into
    # them.  BUGFIX: slice-assign a rebuilt list.  The original iterated a
    # lazy filter() over `dirs` while calling dirs.remove(), which on
    # Python 3 mutates the list being iterated and skips consecutive
    # dot-directories.
    dirs[:] = [d for d in dirs if not d.startswith('.')]
    for f in [join(root, f) for f in files if not f.startswith('.')]:
      filepath = f[len(tree_root) + 1:].replace(os.sep, '/')
      assert len(filepath), f
      tree[filepath] = gclient_utils.FileRead(join(root, f))
  return tree
Пример #9
0
  def Load(changename, local_root, fail_on_not_found, update_status):
    """Gets information about a changelist.

    Args:
      fail_on_not_found: if True, this function will quit the program if the
        changelist doesn't exist.
      update_status: if True, the svn status will be updated for all the files
        and unchanged files will be removed.

    Returns: a ChangeInfo object.
    """
    info_file = GetChangelistInfoFile(changename)
    if not os.path.exists(info_file):
      if fail_on_not_found:
        ErrorExit("Changelist " + changename + " not found.")
      # No saved changelist: hand back an empty ChangeInfo.
      return ChangeInfo(changename, 0, 0, '', None, local_root, None, False)
    content = gclient_utils.FileRead(info_file)
    save = False
    try:
      values = ChangeInfo._LoadNewFormat(content)
    except ValueError:
      try:
        # Fall back to the legacy on-disk format; flag for re-save so the
        # file is upgraded to the new format on the way out.
        values = ChangeInfo._LoadOldFormat(content)
        save = True
      except ValueError:
        ErrorExit(
            ('Changelist file %s is corrupt.\n'
            'Either run "gcl delete %s" or manually edit the file') % (
                info_file, changename))
    files = values['files']
    if update_status:
      # Iterate over a copy since entries may be removed along the way.
      for item in files[:]:
        status_result = SVN.CaptureStatus(item[1], local_root)
        if not status_result or not status_result[0][0]:
          # File has been reverted.
          save = True
          files.remove(item)
          continue
        status = status_result[0][0]
        if status != item[0]:
          save = True
          files[files.index(item)] = (status, item[1])
    change_info = ChangeInfo(
        changename,
        values['issue'],
        values['patchset'],
        values['description'],
        files,
        local_root,
        values.get('rietveld'),
        values['needs_upload'])
    if save:
      # Persist format upgrades and status refreshes back to disk.
      change_info.Save()
    return change_info
Пример #10
0
    def ReadRootFile(self, filename):
        """Searches for `filename` from checkout_root up to toplevel_root.

        Returns the file's contents, or None when no such file exists in any
        directory on that path.
        """
        root = self.toplevel_root or self.checkout_root
        cur = self.checkout_root
        assert cur.startswith(root), (root, cur)
        # Walk upward one directory at a time, stopping at the top level.
        while cur.startswith(root):
            candidate = os.path.join(cur, filename)
            if os.path.isfile(candidate):
                logging.info('Found %s at %s' % (filename, cur))
                return gclient_utils.FileRead(candidate)
            cur = os.path.dirname(cur)
        logging.warning('Didn\'t find %s' % filename)
        return None
Пример #11
0
 def ReadRootFile(self, filename):
     """Reads `filename` from the checkout root or any parent directory
     up to the gclient root.

     Without options.root only the checkout root itself is consulted.
     Returns the file's contents, or None when the file is not found.
     """
     if not self.options.root:
         # No explicit root configured: look only in the checkout root.
         candidate = os.path.join(self.checkout_root, filename)
         if not os.path.isfile(candidate):
             return None
         logging.info('Found %s at %s' % (filename, self.checkout_root))
         return gclient_utils.FileRead(candidate)
     cur = os.path.abspath(self.checkout_root)
     if self.gclient_root:
         root = os.path.abspath(self.gclient_root)
     else:
         root = gclient_utils.FindGclientRoot(cur)
     # Fall back to the checkout root itself when no gclient root exists.
     root = root or cur
     assert cur.startswith(root), (root, cur)
     # Walk upward one directory at a time until the gclient root.
     while cur.startswith(root):
         candidate = os.path.join(cur, filename)
         if os.path.isfile(candidate):
             logging.info('Found %s at %s' % (filename, cur))
             return gclient_utils.FileRead(candidate)
         cur = os.path.dirname(cur)
     logging.warning('Didn\'t find %s' % filename)
     return None
Пример #12
0
    def NewContents(self):
        """Returns the lines of the new version of the file.

        The new version is the file in the user's workspace, i.e. the
        "right hand side".  An empty list is returned for directories.
        Note: the carriage returns (LF or CR) are stripped off.
        """
        if self.IsDirectory():
            return []
        contents = gclient_utils.FileRead(self.AbsoluteLocalPath(), 'rU')
        return contents.splitlines()
Пример #13
0
    def _ReadEntries(self):
        """Reads the entries file for this client.

        Returns:
          A sequence of solution names; empty if the entries file has not
          been created yet.
        """
        entries_path = os.path.join(self._root_dir,
                                    self._options.entries_filename)
        if not os.path.exists(entries_path):
            return []
        # The entries file is a Python fragment defining `entries`.
        scope = {}
        exec(gclient_utils.FileRead(entries_path), scope)
        return scope["entries"]
Пример #14
0
    def ParseTasksFile(self):
        """Parses the tasks file for this tasks.

        Reads self.tasks_file from self.root_dir (when present) and executes
        it with Var() lookups available.  'use strict' on the first line
        enables a restricted execution environment plus type checks on the
        resulting scope.
        """
        tasks_content = None
        use_strict = False
        filepath = os.path.join(self.root_dir, self.tasks_file)
        if not os.path.isfile(filepath):
            logging.info('ParseTasksFile(%s): No %s file found at %s' %
                         (self.name, self.tasks_file, filepath))
        else:
            tasks_content = gclient_utils.FileRead(filepath)
            logging.debug('ParseTasksFile(%s) read:\n%s' %
                          (self.name, tasks_content))
            # BUGFIX: guard against an empty file -- splitlines() returns []
            # and indexing [0] raised IndexError.
            first_line = tasks_content.splitlines()[:1]
            use_strict = bool(first_line) and 'use strict' in first_line[0]

        local_scope = {}
        if tasks_content:
            # One thing is unintuitive, vars = {} must happen before Var() use.
            var = self.VarImpl(self.custom_vars, local_scope)
            if use_strict:
                logging.info('ParseTasksFile(%s): Strict Mode Enabled',
                             self.name)
                global_scope = {
                    '__builtins__': {
                        'None': None
                    },
                    'Var': var.Lookup,
                    'tasks_os': {},
                }
            else:
                global_scope = {
                    'Var': var.Lookup,
                    'tasks_os': {},
                }
            # Eval the content.
            try:
                exec(tasks_content, global_scope, local_scope)
            # `except E as e` replaces the Python-2-only comma form; it works
            # on Python 2.6+ and Python 3.
            except SyntaxError as e:
                gclient_utils.SyntaxErrorToError(filepath, e)
            if use_strict:
                # items() replaces iteritems() for Python 3 compatibility
                # (on Python 2 it returns a list -- same iteration behavior).
                for key, val in local_scope.items():
                    if not isinstance(val, (dict, list, tuple, str)):
                        raise gclient_utils.Error(
                            'ParseTasksFile(%s): Strict mode disallows %r -> %r'
                            % (self.name, key, val))
Пример #15
0
def DoGetTrySlaves(change,
                   changed_files,
                   repository_root,
                   default_presubmit,
                   project,
                   verbose,
                   output_stream):
  """Get the list of try servers from the presubmit scripts.

  Runs GetTrySlavesExecuter over the default presubmit script (if any) and
  over every PRESUBMIT.py relevant to the changed files, then returns the
  deduplicated union of the try slaves they report.

  Args:
    changed_files: List of modified files.
    repository_root: The repository root.
    default_presubmit: A default presubmit script to execute in any case.
    project: Optional name of a project used in selecting trybots.
    verbose: Prints debug info.
    output_stream: A stream to write debug output to.

  Return:
    List of try slaves
  """
  presubmit_files = ListRelevantPresubmitFiles(changed_files, repository_root)
  if not presubmit_files and verbose:
    output_stream.write("Warning, no presubmit.py found.\n")
  results = []
  executer = GetTrySlavesExecuter()
  if default_presubmit:
    if verbose:
      output_stream.write("Running default presubmit script.\n")
    # The default script has no real location; pretend it lives at the root.
    fake_path = os.path.join(repository_root, 'PRESUBMIT.py')
    results += executer.ExecPresubmitScript(
        default_presubmit, fake_path, project, change)
  for filename in presubmit_files:
    filename = os.path.abspath(filename)
    if verbose:
      output_stream.write("Running %s\n" % filename)
    # Accept CRLF presubmit script.
    presubmit_script = gclient_utils.FileRead(filename, 'rU')
    results += executer.ExecPresubmitScript(
        presubmit_script, filename, project, change)

  # Deduplicate; note this discards any ordering of the results.
  slaves = list(set(results))
  if slaves and verbose:
    output_stream.write(', '.join(slaves))
    output_stream.write('\n')
  return slaves
Пример #16
0
  def NewContents(self):
    """Returns an iterator over the lines in the new version of file.

    The new version is the file in the user's workspace, i.e. the "right hand
    side".

    Contents will be empty if the file is a directory or does not exist.
    Note: The carriage returns (LF or CR) are stripped off.
    """
    if self._cached_new_contents is None:
      # Default to empty before attempting the read, so a failed read is
      # also cached as "no contents".
      self._cached_new_contents = []
      if not self.IsDirectory():
        try:
          new_lines = gclient_utils.FileRead(
              self.AbsoluteLocalPath(), 'rU').splitlines()
          self._cached_new_contents = new_lines
        except IOError:
          pass  # File not found?  That's fine; maybe it was deleted.
    # Hand out a copy so callers cannot corrupt the cache.
    return self._cached_new_contents[:]
Пример #17
0
def FindGclientRoot(from_dir, filename='.gclient'):
    """Tries to find the gclient root.

    Walks up from `from_dir` until a directory containing `filename` is
    found.  When the match is in an ancestor directory and an entries file
    exists, the entries are consulted to confirm `from_dir` really is a
    directory managed by that configuration.

    Returns the root directory path, or None when no root was found.
    """
    real_from_dir = os.path.abspath(from_dir)
    path = real_from_dir
    while not os.path.exists(os.path.join(path, filename)):
        split_path = os.path.split(path)
        if not split_path[1]:
            # Reached the filesystem root without finding the file.
            return None
        path = split_path[0]

    logging.info('Found gclient root at ' + path)

    if path == real_from_dir:
        return path

    # If we did not find the file in the current directory, make sure we are in a
    # sub directory that is controlled by this configuration.
    entries_filename = os.path.join(path, filename + '_entries')
    if not os.path.exists(entries_filename):
        # If .gclient_entries does not exist, a previous call to gclient sync
        # might have failed. In that case, we cannot verify that the .gclient
        # is the one we want to use. In order to not to cause too much trouble,
        # just issue a warning and return the path anyway.
        print(
            "%s missing, %s file in parent directory %s might not be the file "
            "you want to use." % (entries_filename, filename, path),
            file=sys.stderr)
        return path

    entries_content = gclient_utils.FileRead(entries_filename)
    scope = {}
    try:
        exec(entries_content, scope)
    # FIX: `except (SyntaxError, Exception)` was redundant -- Exception
    # already subsumes SyntaxError; the caught set is unchanged.
    except Exception as e:
        # NOTE(review): this reports `filename` ('.gclient'), not
        # `entries_filename`, in the error -- confirm that is intended.
        gclient_utils.SyntaxErrorToError(filename, e)

    all_directories = scope['entries'].keys()
    path_to_check = os.path.relpath(real_from_dir, path)
    # Check from_dir and each of its ancestors (relative to the root) against
    # the known entries.
    while path_to_check:
        if path_to_check in all_directories:
            return path
        path_to_check = os.path.dirname(path_to_check)

    return None
Пример #18
0
def GenFakeDiff(filename):
    """Generates a fake diff from a file."""
    content_lines = gclient_utils.FileRead(filename, 'rb').splitlines(True)
    # We need to use / since patch on unix will fail otherwise.
    filename = filename.replace(os.sep, '/')
    header = [
        "Index: %s\n" % filename,
        '=' * 67 + '\n',
        # Note: Should we use /dev/null instead?
        "--- %s\n" % filename,
        "+++ %s\n" % filename,
        "@@ -0,0 +1,%d @@\n" % len(content_lines),
    ]
    # Every original line becomes an added ('+') line.
    body = ''.join('+' + line for line in content_lines)
    return ''.join(header) + body
Пример #19
0
def GetCachedFile(filename, max_age=60*60*24*3, use_root=False):
  """Retrieves a file from the repository and caches it in GetCacheDir() for
  max_age seconds.

  use_root: If False, look up the arborescence for the first match, otherwise go
            directory to the root repository.

  Note: The cache will be inconsistent if the same file is retrieved with both
        use_root=True and use_root=False. Don't be stupid.
  """
  if filename not in FILES_CACHE:
    # Don't try to look up twice.
    FILES_CACHE[filename] = None
    # First we check if we have a cached version.
    try:
      cached_file = os.path.join(GetCacheDir(), filename)
    except gclient_utils.Error:
      return None
    if (not os.path.exists(cached_file) or
        (time.time() - os.stat(cached_file).st_mtime) > max_age):
      dir_info = SVN.CaptureInfo('.')
      repo_root = dir_info['Repository Root']
      if use_root:
        url_path = repo_root
      else:
        url_path = dir_info['URL']
      # Walk up the repository URL one level at a time until the file is
      # found or the repo root is reached.
      while True:
        # Look in the repository at the current level for the file.
        # Up to 5 attempts per level to ride out transient svn failures.
        for _ in range(5):
          content = None
          try:
            # Take advantage of the fact that svn won't output to stderr in case
            # of success but will do in case of failure so don't mind putting
            # stderr into content_array.
            content_array = []
            svn_path = url_path + '/' + filename
            args = ['svn', 'cat', svn_path]
            if sys.platform != 'darwin':
              # MacOSX 10.5.2 has a bug with svn 1.4.4 that will trigger the
              # 'Can\'t get username or password' and can be fixed easily.
              # The fix doesn't work if the user upgraded to svn 1.6.x. Bleh.
              # I don't have time to fix their broken stuff.
              args.append('--non-interactive')
            gclient_utils.CheckCallAndFilter(
                args, cwd='.', filter_fn=content_array.append)
            # Exit the loop if the file was found. Override content.
            content = '\n'.join(content_array)
            break
          except gclient_utils.Error:
            if content_array[0].startswith(
                'svn: Can\'t get username or password'):
              ErrorExit('Your svn credentials expired. Please run svn update '
                        'to fix the cached credentials')
            if content_array[0].startswith('svn: Can\'t get password'):
              ErrorExit('If are using a Mac and svn --version shows 1.4.x, '
                  'please hack gcl.py to remove --non-interactive usage, it\'s'
                  'a bug on your installed copy')
            if not content_array[0].startswith('svn: File not found:'):
              # Try again.
              continue
            # NOTE(review): a 'File not found' error also falls through to the
            # next retry iteration; a `break` here may have been intended.
        if content:
          break
        if url_path == repo_root:
          # Reached the root. Abandoning search.
          break
        # Go up one level to try again.
        url_path = os.path.dirname(url_path)
      if content is not None or filename != CODEREVIEW_SETTINGS_FILE:
        # Write a cached version even if there isn't a file, so we don't try to
        # fetch it each time. codereview.settings must always be present so do
        # not cache negative.
        gclient_utils.FileWrite(cached_file, content or '')
    else:
      content = gclient_utils.FileRead(cached_file, 'r')
    # Keep the content cached in memory.
    FILES_CACHE[filename] = content
  return FILES_CACHE[filename]
Пример #20
0
def TryChange(argv, change, swallow_exception, prog=None, extra_epilog=None):
    """Parses try-job options from argv, builds a diff, and sends the job.

  Args:
    argv: Arguments and options.
    change: Change instance corresponding to the CL.
    swallow_exception: Whether we raise or swallow exceptions.

  Returns 0 on success and 1 on failure.  NOTE: this function uses
  Python 2 only syntax (print statements, `except Error, e`).
  """
    parser = gen_parser(prog)
    epilog = EPILOG % {'prog': prog}
    if extra_epilog:
        epilog += extra_epilog
    parser.epilog = epilog

    options, args = parser.parse_args(argv)

    # If they've asked for help, give it to them
    if len(args) == 1 and args[0] == 'help':
        parser.print_help()
        return 0

    # If they've said something confusing, don't spawn a try job until you
    # understand what they want.
    if args:
        parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))

    if options.dry_run:
        options.verbose += 1

    # Map the -v count onto logging levels (only when exceptions propagate).
    LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s'
    if not swallow_exception:
        if options.verbose == 0:
            logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT)
        elif options.verbose == 1:
            logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
        elif options.verbose > 1:
            logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)

    logging.debug(argv)

    if (options.patchlevel is not None
            and (options.patchlevel < 0 or options.patchlevel > 10)):
        parser.error(
            'Have you tried --port instead? You probably confused -p and -P.')

    # Strip off any @ in the user, otherwise svn gets confused.
    options.user = options.user.split('@', 1)[0]

    if options.rietveld_url:
        # Try to extract the review number if possible and fix the protocol.
        if not '://' in options.rietveld_url:
            options.rietveld_url = 'http://' + options.rietveld_url
        match = re.match(r'^(.*)/(\d+)/?$', options.rietveld_url)
        if match:
            if options.issue or options.patchset:
                parser.error(
                    'Cannot use both --issue and use a review number url')
            options.issue = int(match.group(2))
            options.rietveld_url = match.group(1)

    try:
        changed_files = None
        # Always include os.getcwd() in the checkout settings.
        path = os.getcwd()

        file_list = []
        if options.files:
            file_list = options.files
        elif change:
            file_list = [f.LocalPath() for f in change.AffectedFiles()]

        if options.upstream_branch:
            path += '@' + options.upstream_branch
            # Clear file list so that the correct list will be retrieved from the
            # upstream branch.
            file_list = []

        # Detect the VCS for the primary checkout and let it provide defaults.
        current_vcs = GuessVCS(options, path, file_list)
        current_vcs.AutomagicalSettings()
        options = current_vcs.options
        vcs_is_git = type(current_vcs) is GIT

        # So far, git_repo doesn't work with SVN
        if options.git_repo and not vcs_is_git:
            parser.error(
                '--git_repo option is supported only for GIT repositories')

        # If revision==auto, resolve it
        if options.revision and options.revision.lower() == 'auto':
            if not vcs_is_git:
                parser.error(
                    '--revision=auto is supported only for GIT repositories')
            options.revision = scm.GIT.Capture(
                ['rev-parse', current_vcs.diff_against], cwd=path)

        # Collect the primary checkout plus any --sub_rep checkouts, rejecting
        # duplicate roots.
        checkouts = [current_vcs]
        for item in options.sub_rep:
            # Pass file_list=None because we don't know the sub repo's file list.
            checkout = GuessVCS(options,
                                os.path.join(current_vcs.checkout_root, item),
                                None)
            if checkout.checkout_root in [c.checkout_root for c in checkouts]:
                parser.error('Specified the root %s two times.' %
                             checkout.checkout_root)
            checkouts.append(checkout)

        can_http = options.port and options.host
        can_svn = options.svn_repo
        can_git = options.git_repo
        can_gerrit = options.gerrit_url
        can_something = can_http or can_svn or can_git or can_gerrit
        # If there was no transport selected yet, now we must have enough data to
        # select one.
        if not options.send_patch and not can_something:
            parser.error('Please specify an access method.')

        # Convert options.diff into the content of the diff.
        if options.url:
            if options.files:
                parser.error(
                    'You cannot specify files and --url at the same time.')
            options.diff = urllib2.urlopen(options.url).read()
        elif options.diff:
            if options.files:
                parser.error(
                    'You cannot specify files and --diff at the same time.')
            options.diff = gclient_utils.FileRead(options.diff, 'rb')
        elif options.issue and options.patchset is None:
            # Retrieve the patch from rietveld when the diff is not specified.
            # When patchset is specified, it's because it's done by gcl/git-try.
            api_url = '%s/api/%d' % (options.rietveld_url, options.issue)
            logging.debug(api_url)
            contents = json.loads(urllib2.urlopen(api_url).read())
            options.patchset = contents['patchsets'][-1]
            diff_url = (
                '%s/download/issue%d_%d.diff' %
                (options.rietveld_url, options.issue, options.patchset))
            diff = GetMungedDiff('', urllib2.urlopen(diff_url).readlines())
            options.diff = ''.join(diff[0])
            changed_files = diff[1]
        else:
            # Use this as the base.
            root = checkouts[0].checkout_root
            diffs = []
            for checkout in checkouts:
                raw_diff = checkout.GenerateDiff()
                if not raw_diff:
                    continue
                diff = raw_diff.splitlines(True)
                path_diff = gclient_utils.PathDifference(
                    root, checkout.checkout_root)
                # Munge it.
                diffs.extend(GetMungedDiff(path_diff, diff)[0])
            if not diffs:
                logging.error('Empty or non-existant diff, exiting.')
                return 1
            options.diff = ''.join(diffs)

        if not options.name:
            if options.issue:
                options.name = 'Issue %s' % options.issue
            else:
                options.name = 'Unnamed'
                print('Note: use --name NAME to change the try job name.')

        if not options.email:
            parser.error(
                'Using an anonymous checkout. Please use --email or set '
                'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.')
        print('Results will be emailed to: ' + options.email)

        # Build the bot specification, either from an explicit --bot list or
        # from the try-slaves reported by presubmit scripts.
        if options.bot:
            bot_spec = _ApplyTestFilter(
                options.testfilter,
                _ParseBotList(options.bot, options.testfilter))
        else:
            bot_spec = _GenTSBotSpec(checkouts, change, changed_files, options)

        if options.testfilter:
            bot_spec = _ApplyTestFilter(options.testfilter, bot_spec)

        if any('triggered' in b[0] for b in bot_spec):
            print >> sys.stderr, (
                'ERROR You are trying to send a job to a triggered bot.  This type of'
                ' bot requires an\ninitial job from a parent (usually a builder).  '
                'Instead send your job to the parent.\nBot list: %s' %
                bot_spec)
            return 1

        if options.print_bots:
            print 'Bots which would be used:'
            for bot in bot_spec:
                if bot[1]:
                    print '  %s:%s' % (bot[0], ','.join(bot[1]))
                else:
                    print '  %s' % (bot[0])
            return 0

        # Determine sending protocol
        if options.send_patch:
            # If forced.
            senders = [options.send_patch]
        else:
            # Try sending patch using avaialble protocols
            all_senders = [
                (_SendChangeHTTP, can_http),
                (_SendChangeSVN, can_svn),
                (_SendChangeGerrit, can_gerrit),
                (_SendChangeGit, can_git),
            ]
            senders = [sender for sender, can in all_senders if can]

        # Send the patch.  Each sender is tried in turn; only the last
        # failure is allowed to propagate.
        for sender in senders:
            try:
                sender(bot_spec, options)
                return 0
            except NoTryServerAccess:
                is_last = sender == senders[-1]
                if is_last:
                    raise
        assert False, "Unreachable code"
    except Error, e:
        if swallow_exception:
            return 1
        print >> sys.stderr, e
        return 1
Пример #21
0
def CheckChange(input_api, message_constructor):
    """Checks for files with a modified contents.

  Some checking of validator happens on buildbots, but comprehensive
  enumeration tests must be run locally.

  There are two dangers:
    1. Source code for autogenerated files can be modified without regeneration
       of said files.
    2. Source of validator can be changed without running the aforementioned
       tests.

  This function catches the situation when source files for validator_x86_??.c
  are changed but files are not regenerated and it also catches the situation
  when code is changed without running the dfacheckvalidator tests.

  Returns a list of error messages built with message_constructor.
  """

    errors = []

    changelist = input_api.change

    root_path = changelist.RepositoryRoot()

    if input_api.change.scm == 'svn':
        # With SVN you can decide to commit not all modified files but some of them
        # thus separate GetAllModifiedFiles() and GetModifiedFiles() lists are
        # provided.  We need to remove root_path from the name of file.
        assert all(
            filename.startswith(root_path + os.path.sep)
            for filename in changelist.GetAllModifiedFiles())
        all_filenames = [
            filename[len(root_path + os.path.sep):]
            for filename in changelist.GetAllModifiedFiles()
        ]

        assert all(
            filename.startswith(root_path + os.path.sep)
            for filename in changelist.GetModifiedFiles())
        modified_filenames = [
            filename[len(root_path + os.path.sep):]
            for filename in changelist.GetModifiedFiles()
        ]
    else:
        # With GIT you must commit all modified files thus only AffectedFiles()
        # list is provided.
        all_filenames = [
            file.LocalPath() for file in changelist.AffectedFiles()
        ]
        modified_filenames = all_filenames

    json_filename = os.path.join('src', 'trusted', 'validator_ragel', 'gen',
                                 'protected_files.json')

    # Maps the categories 'validator', 'generating' and 'generated' to
    # protected-file information (hashes for 'validator' entries).
    protected_files = json.loads(
        gclient_utils.FileRead(os.path.join(root_path, json_filename)))

    need_dfagen = False
    need_dfacheckvalidator = False

    canonical_prefix = 'native_client/'

    for filename in sorted(all_filenames):
        canonical_filename = canonical_prefix + filename.replace('\\', '/')
        if canonical_filename in protected_files['validator']:
            file_contents = gclient_utils.FileRead(
                os.path.join(root_path, filename))
            # NOTE(review): hashlib.sha512 requires bytes on Python 3; this
            # passes FileRead's result directly, so this code presumably
            # targets Python 2 -- confirm before porting.
            sha512 = hashlib.sha512(file_contents).hexdigest()
            if sha512 != protected_files['validator'][canonical_filename]:
                errors.append(
                    message_constructor(
                        'Incorrect {0} hash:\n  expected {1}\n       got {2}'.
                        format(
                            canonical_filename,
                            protected_files['validator'][canonical_filename],
                            sha512)))
            need_dfacheckvalidator = True
        if canonical_filename in protected_files['generating']:
            # A generating source newer than any generated automaton means the
            # generated files are stale.
            for automaton_filename in protected_files['generated']:
                if (os.stat(os.path.join(
                        root_path, filename)).st_mtime > os.stat(
                            os.path.join(
                                root_path,
                                automaton_filename[len(canonical_prefix):])).
                        st_mtime):
                    errors.append(
                        message_constructor(
                            'File {0} is older then {1}'.format(
                                automaton_filename, canonical_filename)))
                    need_dfagen = True
        if (canonical_filename in protected_files['validator']
                or canonical_filename in protected_files['generating']
                or filename == json_filename):
            if filename not in modified_filenames:
                errors.append(
                    message_constructor(
                        'File {0} is changed but is excluded from this CL'.
                        format(canonical_filename)))

    if need_dfagen:
        errors.append(
            message_constructor('Please run "./scons dfagen" before commit!'))

    if need_dfacheckvalidator:
        errors.append(
            message_constructor(
                'Please run "./scons dfacheckvalidator" before commit!'))

    return errors
Пример #22
0
def TryChange(argv,
              file_list,
              swallow_exception,
              prog=None,
              extra_epilog=None):
    """Packages a patch and sends it to the try server.

  The diff is obtained from (in priority order) --url, --diff, a rietveld
  issue, or the local checkout(s); it is then submitted to the try server
  over HTTP or SVN.

  NOTE(review): this function uses Python 2 only constructs
  (`except X, e`, `print >> sys.stderr`, `urllib.urlopen`).

  Args:
    argv: Arguments and options.
    file_list: Default value to pass to --file.
    swallow_exception: Whether we raise or swallow exceptions.
    prog: Program name forwarded to the option parser (usage/version/epilog).
    extra_epilog: Optional extra help text appended to the parser's epilog.

  Returns:
    0 on success, 1 on error.
  """
    # Parse argv
    parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog)
    epilog = EPILOG % {'prog': prog}
    if extra_epilog:
        epilog += extra_epilog
    parser.epilog = epilog
    # Remove epilog formatting
    parser.format_epilog = lambda x: parser.epilog
    parser.add_option("-v",
                      "--verbose",
                      action="count",
                      default=0,
                      help="Prints debugging infos")
    group = optparse.OptionGroup(parser, "Result and status")
    group.add_option("-u",
                     "--user",
                     default=getpass.getuser(),
                     help="Owner user name [default: %default]")
    group.add_option(
        "-e",
        "--email",
        default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS',
                               os.environ.get('EMAIL_ADDRESS')),
        help="Email address where to send the results. Use either "
        "the TRYBOT_RESULTS_EMAIL_ADDRESS environment "
        "variable or EMAIL_ADDRESS to set the email address "
        "the try bots report results to [default: %default]")
    group.add_option("-n", "--name", help="Descriptive name of the try job")
    group.add_option("--issue",
                     type='int',
                     help="Update rietveld issue try job status")
    group.add_option("--patchset",
                     type='int',
                     help="Update rietveld issue try job status. This is "
                     "optional if --issue is used, In that case, the "
                     "latest patchset will be used.")
    group.add_option("--dry_run",
                     action='store_true',
                     help="Don't send the try job. This implies --verbose, so "
                     "it will print the diff.")
    parser.add_option_group(group)

    group = optparse.OptionGroup(parser, "Try job options")
    group.add_option("-b",
                     "--bot",
                     action="append",
                     help="Only use specifics build slaves, ex: "
                     "'--bot win,layout_mac'; see the try "
                     "server waterfall for the slave's name")
    group.add_option("-r",
                     "--revision",
                     help="Revision to use for the try job; default: the "
                     "revision will be determined by the try server; see "
                     "its waterfall for more info")
    group.add_option("-c",
                     "--clobber",
                     action="store_true",
                     help="Force a clobber before building; e.g. don't do an "
                     "incremental build")
    # TODO(maruel): help="Select a specific configuration, usually 'debug' or "
    #                    "'release'"
    group.add_option("--target", help=optparse.SUPPRESS_HELP)

    group.add_option(
        "--project",
        help="Override which project to use. Projects are defined "
        "server-side to define what default bot set to use")

    group.add_option(
        "-t",
        "--testfilter",
        action="append",
        help="Add a gtest_filter to a test. Use multiple times to "
        "specify filters for different tests. (i.e. "
        "--testfilter base_unittests:ThreadTest.* "
        "--testfilter ui_tests) If you specify any testfilters "
        "the test results will not be reported in rietveld and "
        "only tests with filters will run.")

    parser.add_option_group(group)

    group = optparse.OptionGroup(parser, "Patch to run")
    group.add_option("-f",
                     "--file",
                     default=file_list,
                     dest="files",
                     metavar="FILE",
                     action="append",
                     help="Use many times to list the files to include in the "
                     "try, relative to the repository root")
    group.add_option("--diff", help="File containing the diff to try")
    group.add_option("--url",
                     help="Url where to grab a patch, e.g. "
                     "http://example.com/x.diff")
    group.add_option("-R",
                     "--rietveld_url",
                     default="codereview.appspot.com",
                     metavar="URL",
                     help="Has 2 usages, both refer to the rietveld instance: "
                     "Specify which code review patch to use as the try job "
                     "or rietveld instance to update the try job results "
                     "Default:%default")
    group.add_option("--root",
                     help="Root to use for the patch; base subdirectory for "
                     "patch created in a subdirectory")
    group.add_option("-p",
                     "--patchlevel",
                     type='int',
                     metavar="LEVEL",
                     help="Used as -pN parameter to patch")
    group.add_option("-s",
                     "--sub_rep",
                     action="append",
                     default=[],
                     help="Subcheckout to use in addition. This is mainly "
                     "useful for gclient-style checkouts. In git, checkout "
                     "the branch with changes first. Use @rev or "
                     "@branch to specify the "
                     "revision/branch to diff against. If no @branch is "
                     "given the diff will be against the upstream branch. "
                     "If @branch then the diff is branch..HEAD. "
                     "All edits must be checked in.")
    group.add_option("--no_gclient",
                     action="store_true",
                     help="Disable automatic search for gclient checkout.")
    group.add_option(
        "-E",
        "--exclude",
        action="append",
        default=['ChangeLog'],
        metavar='REGEXP',
        help="Regexp patterns to exclude files. Default: %default")
    group.add_option("--upstream_branch",
                     action="store",
                     help="Specify the upstream branch to diff against in the "
                     "main checkout")
    parser.add_option_group(group)

    group = optparse.OptionGroup(parser, "Access the try server by HTTP")
    group.add_option("--use_http",
                     action="store_const",
                     const=_SendChangeHTTP,
                     dest="send_patch",
                     help="Use HTTP to talk to the try server [default]")
    group.add_option("-H", "--host", help="Host address")
    group.add_option("-P", "--port", help="HTTP port")
    group.add_option("--proxy", help="HTTP proxy")
    parser.add_option_group(group)

    group = optparse.OptionGroup(parser, "Access the try server with SVN")
    group.add_option("--use_svn",
                     action="store_const",
                     const=_SendChangeSVN,
                     dest="send_patch",
                     help="Use SVN to talk to the try server")
    group.add_option(
        "-S",
        "--svn_repo",
        metavar="SVN_URL",
        help="SVN url to use to write the changes in; --use_svn is "
        "implied when using --svn_repo")
    parser.add_option_group(group)

    options, args = parser.parse_args(argv)

    # If they've asked for help, give it to them
    if len(args) == 1 and args[0] == 'help':
        parser.print_help()
        return 0

    # If they've said something confusing, don't spawn a try job until you
    # understand what they want.
    if args:
        parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))

    if options.dry_run:
        options.verbose += 1

    LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s'
    if not swallow_exception:
        if options.verbose == 0:
            logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT)
        elif options.verbose == 1:
            logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
        elif options.verbose > 1:
            logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)

    logging.debug(argv)

    # Strip off any @ in the user, otherwise svn gets confused.
    options.user = options.user.split('@', 1)[0]

    if options.rietveld_url:
        # Try to extract the review number if possible and fix the protocol.
        if not '://' in options.rietveld_url:
            options.rietveld_url = 'http://' + options.rietveld_url
        match = re.match(r'^(.*)/(\d+)/?$', options.rietveld_url)
        if match:
            if options.issue or options.patchset:
                parser.error(
                    'Cannot use both --issue and use a review number url')
            options.issue = int(match.group(2))
            options.rietveld_url = match.group(1)

    try:
        # Always include os.getcwd() in the checkout settings.
        checkouts = []
        path = os.getcwd()
        if options.upstream_branch:
            path += '@' + options.upstream_branch
        checkouts.append(GuessVCS(options, path))
        checkouts[0].AutomagicalSettings()
        for item in options.sub_rep:
            checkout = GuessVCS(options,
                                os.path.join(checkouts[0].checkout_root, item))
            if checkout.checkout_root in [c.checkout_root for c in checkouts]:
                parser.error('Specified the root %s two times.' %
                             checkout.checkout_root)
            checkouts.append(checkout)

        can_http = options.port and options.host
        can_svn = options.svn_repo
        # If there was no transport selected yet, now we must have enough data to
        # select one.
        if not options.send_patch and not (can_http or can_svn):
            parser.error('Please specify an access method.')

        # Convert options.diff into the content of the diff.
        if options.url:
            if options.files:
                parser.error(
                    'You cannot specify files and --url at the same time.')
            options.diff = urllib.urlopen(options.url).read()
        elif options.diff:
            if options.files:
                parser.error(
                    'You cannot specify files and --diff at the same time.')
            options.diff = gclient_utils.FileRead(options.diff, 'rb')
        elif options.issue and options.patchset is None:
            # Retrieve the patch from rietveld when the diff is not specified.
            # When patchset is specified, it's because it's done by gcl/git-try.
            if json is None:
                parser.error(
                    'json or simplejson library is missing, please install.')
            api_url = '%s/api/%d' % (options.rietveld_url, options.issue)
            logging.debug(api_url)
            contents = json.loads(urllib.urlopen(api_url).read())
            options.patchset = contents['patchsets'][-1]
            diff_url = (
                '%s/download/issue%d_%d.diff' %
                (options.rietveld_url, options.issue, options.patchset))
            diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines())
            options.diff = ''.join(diff)
        else:
            # Use this as the base.
            root = checkouts[0].checkout_root
            diffs = []
            for checkout in checkouts:
                diff = checkout.GenerateDiff().splitlines(True)
                path_diff = gclient_utils.PathDifference(
                    root, checkout.checkout_root)
                # Munge it.
                diffs.extend(GetMungedDiff(path_diff, diff))
            options.diff = ''.join(diffs)

        if not options.bot:
            # Get try slaves from PRESUBMIT.py files if not specified.
            # Even if the diff comes from options.url, use the local checkout for bot
            # selection.
            try:
                import presubmit_support
                root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py')
                options.bot = presubmit_support.DoGetTrySlaves(
                    checkouts[0].GetFileNames(), checkouts[0].checkout_root,
                    root_presubmit, options.project, False, sys.stdout)
            except ImportError:
                pass
            # If no bot is specified, either the default pool will be selected or the
            # try server will refuse the job. Either case we don't need to interfere.

        if options.name is None:
            if options.issue:
                options.name = 'Issue %s' % options.issue
            else:
                options.name = 'Unnamed'
                print('Note: use --name NAME to change the try job name.')
        if not options.email:
            parser.error(
                'Using an anonymous checkout. Please use --email or set '
                'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.')
        else:
            print('Results will be emailed to: ' + options.email)

        # Prevent rietveld updates if we aren't running all the tests.
        if options.testfilter is not None:
            options.issue = None
            options.patchset = None

        # Send the patch.
        if options.send_patch:
            # If forced.
            options.send_patch(options)
            PrintSuccess(options)
            return 0
        # Prefer HTTP when available and fall back to SVN if the try server
        # cannot be reached that way.
        try:
            if can_http:
                _SendChangeHTTP(options)
                PrintSuccess(options)
                return 0
        except NoTryServerAccess:
            if not can_svn:
                raise
        _SendChangeSVN(options)
        PrintSuccess(options)
        return 0
    # Note: failure returns 1 either way; swallow_exception only silences
    # the error message.
    except (InvalidScript, NoTryServerAccess), e:
        if swallow_exception:
            return 1
        print >> sys.stderr, e
        return 1
Пример #23
0
def TryChange(argv, change, swallow_exception, prog=None, extra_epilog=None):
    """Packages a patch for the given change and sends it to the try server.

  The diff is obtained from (in priority order) --url, --diff, a rietveld
  issue, or the local checkout(s); it is then submitted over HTTP or SVN.

  NOTE(review): this function uses Python 2 only constructs
  (`print` statements, `except X, e`, `urllib.urlopen`).

  Args:
    argv: Arguments and options.
    change: Change instance corresponding to the CL.
    swallow_exception: Whether we raise or swallow exceptions.
    prog: Program name forwarded to the option parser.
    extra_epilog: Optional extra help text appended to the parser's epilog.

  Returns:
    0 on success, 1 on error.
  """
    parser = gen_parser(prog)
    epilog = EPILOG % {'prog': prog}
    if extra_epilog:
        epilog += extra_epilog
    parser.epilog = epilog
    # Remove epilog formatting
    parser.format_epilog = lambda x: parser.epilog

    options, args = parser.parse_args(argv)

    # If they've asked for help, give it to them
    if len(args) == 1 and args[0] == 'help':
        parser.print_help()
        return 0

    # If they've said something confusing, don't spawn a try job until you
    # understand what they want.
    if args:
        parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))

    if options.dry_run:
        options.verbose += 1

    LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s'
    if not swallow_exception:
        if options.verbose == 0:
            logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT)
        elif options.verbose == 1:
            logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
        elif options.verbose > 1:
            logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)

    logging.debug(argv)

    # -p is --patchlevel and -P is --port; a patch level outside 0..10 most
    # likely means the two flags were confused.
    if (options.patchlevel is not None
            and (options.patchlevel < 0 or options.patchlevel > 10)):
        parser.error(
            'Have you tried --port instead? You probably confused -p and -P.')

    # Strip off any @ in the user, otherwise svn gets confused.
    options.user = options.user.split('@', 1)[0]

    if options.rietveld_url:
        # Try to extract the review number if possible and fix the protocol.
        if not '://' in options.rietveld_url:
            options.rietveld_url = 'http://' + options.rietveld_url
        match = re.match(r'^(.*)/(\d+)/?$', options.rietveld_url)
        if match:
            if options.issue or options.patchset:
                parser.error(
                    'Cannot use both --issue and use a review number url')
            options.issue = int(match.group(2))
            options.rietveld_url = match.group(1)

    try:
        changed_files = None
        # Always include os.getcwd() in the checkout settings.
        checkouts = []
        path = os.getcwd()

        file_list = []
        if options.files:
            file_list = options.files
        elif change:
            file_list = [f.LocalPath() for f in change.AffectedFiles()]

        if options.upstream_branch:
            path += '@' + options.upstream_branch
            # Clear file list so that the correct list will be retrieved from the
            # upstream branch.
            file_list = []
        checkouts.append(GuessVCS(options, path, file_list))
        checkouts[0].AutomagicalSettings()
        for item in options.sub_rep:
            # Pass file_list=None because we don't know the sub repo's file list.
            checkout = GuessVCS(options,
                                os.path.join(checkouts[0].checkout_root, item),
                                None)
            if checkout.checkout_root in [c.checkout_root for c in checkouts]:
                parser.error('Specified the root %s two times.' %
                             checkout.checkout_root)
            checkouts.append(checkout)

        can_http = options.port and options.host
        can_svn = options.svn_repo
        # If there was no transport selected yet, now we must have enough data to
        # select one.
        if not options.send_patch and not (can_http or can_svn):
            parser.error('Please specify an access method.')

        # Convert options.diff into the content of the diff.
        if options.url:
            if options.files:
                parser.error(
                    'You cannot specify files and --url at the same time.')
            options.diff = urllib.urlopen(options.url).read()
        elif options.diff:
            if options.files:
                parser.error(
                    'You cannot specify files and --diff at the same time.')
            options.diff = gclient_utils.FileRead(options.diff, 'rb')
        elif options.issue and options.patchset is None:
            # Retrieve the patch from rietveld when the diff is not specified.
            # When patchset is specified, it's because it's done by gcl/git-try.
            api_url = '%s/api/%d' % (options.rietveld_url, options.issue)
            logging.debug(api_url)
            contents = json.loads(urllib.urlopen(api_url).read())
            options.patchset = contents['patchsets'][-1]
            diff_url = (
                '%s/download/issue%d_%d.diff' %
                (options.rietveld_url, options.issue, options.patchset))
            # GetMungedDiff here returns a (diff_lines, changed_files) pair.
            diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines())
            options.diff = ''.join(diff[0])
            changed_files = diff[1]
        else:
            # Use this as the base.
            root = checkouts[0].checkout_root
            diffs = []
            for checkout in checkouts:
                raw_diff = checkout.GenerateDiff()
                if not raw_diff:
                    logging.error('Empty or non-existant diff, exiting.')
                    return 1
                diff = raw_diff.splitlines(True)
                path_diff = gclient_utils.PathDifference(
                    root, checkout.checkout_root)
                # Munge it.
                diffs.extend(GetMungedDiff(path_diff, diff)[0])
            options.diff = ''.join(diffs)

        if not options.name:
            if options.issue:
                options.name = 'Issue %s' % options.issue
            else:
                options.name = 'Unnamed'
                print('Note: use --name NAME to change the try job name.')

        if not options.email:
            parser.error(
                'Using an anonymous checkout. Please use --email or set '
                'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.')
        print('Results will be emailed to: ' + options.email)

        if not options.bot:
            # Get try slaves from PRESUBMIT.py files if not specified.
            # Even if the diff comes from options.url, use the local checkout for bot
            # selection.
            try:
                import presubmit_support
                root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py')
                if not change:
                    if not changed_files:
                        changed_files = checkouts[0].file_tuples
                    change = presubmit_support.Change(
                        options.name, '', checkouts[0].checkout_root,
                        changed_files, options.issue, options.patchset,
                        options.email)
                options.bot = presubmit_support.DoGetTrySlaves(
                    change, checkouts[0].GetFileNames(),
                    checkouts[0].checkout_root, root_presubmit,
                    options.project, options.verbose, sys.stdout)
            except ImportError:
                pass
            if options.testfilter:
                # With a test filter, keep only the base builder names and
                # drop compile-only builders.
                bots = set()
                for bot in options.bot:
                    assert ',' not in bot
                    if bot.endswith(':compile'):
                        # Skip over compile-only builders for now.
                        continue
                    bots.add(bot.split(':', 1)[0])
                options.bot = list(bots)

            # If no bot is specified, either the default pool will be selected or the
            # try server will refuse the job. Either case we don't need to interfere.

        # Triggered bots must be started by their parent builder; refuse to
        # send a job to one directly.
        if any('triggered' in b.split(':', 1)[0] for b in options.bot):
            print >> sys.stderr, (
                'ERROR You are trying to send a job to a triggered bot.  This type of'
                ' bot requires an\ninitial job from a parent (usually a builder).  '
                'Instead send your job to the parent.\nBot list: %s' %
                options.bot)
            return 1

        if options.print_bots:
            print 'Bots which would be used:'
            for bot in options.bot:
                print '  %s' % bot
            return 0

        # Send the patch.
        if options.send_patch:
            # If forced.
            options.send_patch(options)
            PrintSuccess(options)
            return 0
        # Prefer HTTP when available and fall back to SVN if the try server
        # cannot be reached over HTTP.
        try:
            if can_http:
                _SendChangeHTTP(options)
                PrintSuccess(options)
                return 0
        except NoTryServerAccess:
            if not can_svn:
                raise
        _SendChangeSVN(options)
        PrintSuccess(options)
        return 0
    # Note: failure returns 1 either way; swallow_exception only silences
    # the error message.
    except (InvalidScript, NoTryServerAccess), e:
        if swallow_exception:
            return 1
        print >> sys.stderr, e
        return 1
Пример #24
0
 def testFileRead_Unicode(self):
     """FileWrite followed by FileRead round-trips a unicode string."""
     expected = 'foo ✔ bar'
     with gclient_utils.temporary_file() as tmp_path:
         gclient_utils.FileWrite(tmp_path, expected)
         self.assertEqual(expected, gclient_utils.FileRead(tmp_path))
Пример #25
0
 def testTemporaryFile(self):
     """temporary_file() yields a usable path and deletes it on exit."""
     with gclient_utils.temporary_file() as file_path:
         gclient_utils.FileWrite(file_path, 'test')
         self.assertEqual('test', gclient_utils.FileRead(file_path))
     # The context manager must have removed the file by now.
     self.assertFalse(os.path.exists(file_path))
Пример #26
0
def DoPresubmitChecks(change,
                      committing,
                      verbose,
                      output_stream,
                      input_stream,
                      default_presubmit,
                      may_prompt,
                      rietveld_obj):
  """Runs all presubmit checks that apply to the files in the change.

  This finds all PRESUBMIT.py files in directories enclosing the files in the
  change (up to the repository root) and calls the relevant entrypoint function
  depending on whether the change is being committed or uploaded.

  Prints errors, warnings and notifications.  Prompts the user for warnings
  when needed.

  Args:
    change: The Change object.
    committing: True if 'gcl commit' is running, False if 'gcl upload' is.
    verbose: Prints debug info.
    output_stream: A stream to write output from presubmit tests to.
    input_stream: A stream to read input from the user.
    default_presubmit: A default presubmit script to execute in any case.
    may_prompt: Enable (y/n) questions on warning or error.
    rietveld_obj: rietveld.Rietveld object.

  Warning:
    If may_prompt is true, output_stream SHOULD be sys.stdout and input_stream
    SHOULD be sys.stdin.

  Return:
    A PresubmitOutput object. Use output.should_continue() to figure out
    if there were errors or warnings and the caller should abort.
  """
  # Save the environment so the tweak below is undone in the finally clause
  # even if a presubmit script raises.
  old_environ = os.environ
  try:
    # Make sure python subprocesses won't generate .pyc files.
    os.environ = os.environ.copy()
    os.environ['PYTHONDONTWRITEBYTECODE'] = '1'

    output = PresubmitOutput(input_stream, output_stream)
    if committing:
      output.write("Running presubmit commit checks ...\n")
    else:
      output.write("Running presubmit upload checks ...\n")
    start_time = time.time()
    presubmit_files = ListRelevantPresubmitFiles(
        change.AbsoluteLocalPaths(True), change.RepositoryRoot())
    if not presubmit_files and verbose:
      output.write("Warning, no PRESUBMIT.py found.\n")
    results = []
    executer = PresubmitExecuter(change, committing, rietveld_obj, verbose)
    if default_presubmit:
      if verbose:
        output.write("Running default presubmit script.\n")
      fake_path = os.path.join(change.RepositoryRoot(), 'PRESUBMIT.py')
      results += executer.ExecPresubmitScript(default_presubmit, fake_path)
    for filename in presubmit_files:
      filename = os.path.abspath(filename)
      if verbose:
        output.write("Running %s\n" % filename)
      # Accept CRLF presubmit script.
      presubmit_script = gclient_utils.FileRead(filename, 'rU')
      results += executer.ExecPresubmitScript(presubmit_script, filename)

    # Bucket the results by severity before reporting them.
    errors = []
    notifications = []
    warnings = []
    for result in results:
      if result.fatal:
        errors.append(result)
      elif result.should_prompt:
        warnings.append(result)
      else:
        notifications.append(result)

    output.write('\n')
    for name, items in (('Messages', notifications),
                        ('Warnings', warnings),
                        ('ERRORS', errors)):
      if items:
        output.write('** Presubmit %s **\n' % name)
        for item in items:
          item.handle(output)
          output.write('\n')

    total_time = time.time() - start_time
    if total_time > 1.0:
      output.write("Presubmit checks took %.1fs to calculate.\n\n" % total_time)

    if not errors:
      if not warnings:
        output.write('Presubmit checks passed.\n')
      elif may_prompt:
        # Warnings only: let the user decide whether to continue.
        output.prompt_yes_no('There were presubmit warnings. '
                            'Are you sure you wish to continue? (y/N): ')
      else:
        # Non-interactive run: warnings mark the output as failed.
        output.fail()

    global _ASKED_FOR_FEEDBACK
    # Ask for feedback one time out of 5.
    if (len(results) and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK):
      output.write("Was the presubmit check useful? Please send feedback "
                  "& hate mail to [email protected]!\n")
      _ASKED_FOR_FEEDBACK = True
    return output
  finally:
    # Restore the environment modified above.
    os.environ = old_environ
Пример #27
0
    def RunOnDeps(self, command, args):
        """Runs a command on each dependency in a client and its dependencies.

    The module's dependencies are specified in its top-level DEPS files.

    Args:
      command: The command to use (e.g., 'status' or 'diff')
      args: list of str - extra arguments to add to the command line.

    Raises:
      Error: If the client has conflicting entries.
    """
        if not command in self.supported_commands:
            raise gclient_utils.Error("'%s' is an unsupported command" %
                                      command)

        # Check for revision overrides.
        # Overrides are "dep_name@revision" strings from the command line,
        # collected into a {dep_name: revision} mapping.
        revision_overrides = {}
        for revision in self._options.revisions:
            if revision.find("@") == -1:
                raise gclient_utils.Error(
                    "Specify the full dependency when specifying a revision number."
                )
            revision_elem = revision.split("@")
            # Disallow conflicting revs
            # (dict.has_key is Python 2 only; equivalent to 'key in dict'.)
            if revision_overrides.has_key(revision_elem[0]) and \
               revision_overrides[revision_elem[0]] != revision_elem[1]:
                raise gclient_utils.Error(
                    "Conflicting revision numbers specified.")
            revision_overrides[revision_elem[0]] = revision_elem[1]

        solutions = self.GetVar("solutions")
        if not solutions:
            raise gclient_utils.Error("No solution specified")

        # When running runhooks --force, there's no need to consult the SCM.
        # All known hooks are expected to run unconditionally regardless of working
        # copy state, so skip the SCM status check.
        run_scm = not (command == 'runhooks' and self._options.force)

        entries = {}
        entries_deps_content = {}
        file_list = []
        # Run on the base solutions first.
        for solution in solutions:
            name = solution["name"]
            deps_file = solution.get("deps_file", self._options.deps_file)
            if '/' in deps_file or '\\' in deps_file:
                raise gclient_utils.Error(
                    'deps_file name must not be a path, just a '
                    'filename.')
            if name in entries:
                raise gclient_utils.Error(
                    "solution %s specified more than once" % name)
            url = solution["url"]
            entries[name] = url
            if run_scm and url:
                # Temporarily stash the per-solution revision override on
                # self._options for the SCM command, then clear it.
                self._options.revision = revision_overrides.get(name)
                scm = gclient_scm.CreateSCM(url, self._root_dir, name)
                scm.RunCommand(command, self._options, args, file_list)
                file_list = [os.path.join(name, f.strip()) for f in file_list]
                self._options.revision = None
            try:
                deps_content = gclient_utils.FileRead(
                    os.path.join(self._root_dir, name, deps_file))
            except IOError, e:
                # A missing DEPS file is not an error; treat it as empty.
                if e.errno != errno.ENOENT:
                    raise
                deps_content = ""
            entries_deps_content[name] = deps_content
Пример #28
0
    def PrintRevInfo(self):
        """Output revision info mapping for the client and its dependencies. This
    allows the capture of a overall "revision" for the source tree that can
    be used to reproduce the same tree in the future. The actual output
    contains enough information (source paths, svn server urls and revisions)
    that it can be used either to generate external svn commands (without
    gclient) or as input to gclient's --rev option (with some massaging of
    the data).

    NOTE: Unlike RunOnDeps this does not require a local checkout and is run
    on the Pulse master. It MUST NOT execute hooks.

    Raises:
      Error: If the client has conflicting entries.
    """
        # Check for revision overrides.
        # Overrides are "dep_name@revision" strings collected into a dict.
        revision_overrides = {}
        for revision in self._options.revisions:
            if revision.find("@") < 0:
                raise gclient_utils.Error(
                    "Specify the full dependency when specifying a revision number."
                )
            revision_elem = revision.split("@")
            # Disallow conflicting revs
            # (dict.has_key is Python 2 only; equivalent to 'key in dict'.)
            if revision_overrides.has_key(revision_elem[0]) and \
               revision_overrides[revision_elem[0]] != revision_elem[1]:
                raise gclient_utils.Error(
                    "Conflicting revision numbers specified.")
            revision_overrides[revision_elem[0]] = revision_elem[1]

        solutions = self.GetVar("solutions")
        if not solutions:
            raise gclient_utils.Error("No solution specified")

        entries = {}
        entries_deps_content = {}

        # Inner helper to generate base url and rev tuple (including honoring
        # |revision_overrides|)
        def GetURLAndRev(name, original_url):
            url, revision = gclient_utils.SplitUrlRevision(original_url)
            if not revision:
                if revision_overrides.has_key(name):
                    return (url, revision_overrides[name])
                else:
                    # NOTE(review): 'solution' is captured from the enclosing
                    # loop by late binding; when called from the deps passes
                    # below it refers to the last solution iterated — confirm
                    # this is intended.
                    scm = gclient_scm.CreateSCM(solution["url"],
                                                self._root_dir, name)
                    return (url, scm.revinfo(self._options, [], None))
            else:
                if revision_overrides.has_key(name):
                    return (url, revision_overrides[name])
                else:
                    return (url, revision)

        # Run on the base solutions first.
        for solution in solutions:
            name = solution["name"]
            if name in entries:
                raise gclient_utils.Error(
                    "solution %s specified more than once" % name)
            (url, rev) = GetURLAndRev(name, solution["url"])
            entries[name] = "%s@%s" % (url, rev)
            # TODO(aharper): SVN/SCMWrapper cleanup (non-local commandset)
            entries_deps_content[name] = gclient_scm.scm.SVN.Capture(
                ["cat",
                 "%s/%s@%s" % (url, self._options.deps_file, rev)],
                os.getcwd())

        # Process the dependencies next (sort alphanumerically to ensure that
        # containing directories get populated first and for readability)
        # (dict.keys() returns a list in Python 2, so in-place sort works.)
        deps = self._ParseAllDeps(entries, entries_deps_content)
        deps_to_process = deps.keys()
        deps_to_process.sort()

        # First pass for direct dependencies.
        for d in deps_to_process:
            if type(deps[d]) == str:
                (url, rev) = GetURLAndRev(d, deps[d])
                entries[d] = "%s@%s" % (url, rev)

        # Second pass for inherited deps (via the From keyword)
        for d in deps_to_process:
            if type(deps[d]) != str:
                deps_parent_url = entries[deps[d].module_name]
                if deps_parent_url.find("@") < 0:
                    raise gclient_utils.Error(
                        "From %s missing revisioned url" % deps[d].module_name)
                content = gclient_utils.FileRead(
                    os.path.join(self._root_dir, deps[d].module_name,
                                 self._options.deps_file))
                sub_deps = self._ParseSolutionDeps(deps[d].module_name,
                                                   content, {})
                (url, rev) = GetURLAndRev(d, sub_deps[d])
                entries[d] = "%s@%s" % (url, rev)
        print(";\n\n".join(
            ["%s: %s" % (x, entries[x]) for x in sorted(entries.keys())]))
Пример #29
0
def DoPresubmitChecks(change, committing, verbose, output_stream, input_stream,
                      default_presubmit, may_prompt):
    """Runs all presubmit checks that apply to the files in the change.

  This finds all PRESUBMIT.py files in directories enclosing the files in the
  change (up to the repository root) and calls the relevant entrypoint function
  depending on whether the change is being committed or uploaded.

  Prints errors, warnings and notifications.  Prompts the user for warnings
  when needed.

  Args:
    change: The Change object.
    committing: True if 'gcl commit' is running, False if 'gcl upload' is.
    verbose: Prints debug info.
    output_stream: A stream to write output from presubmit tests to.
    input_stream: A stream to read input from the user.
    default_presubmit: A default presubmit script to execute in any case.
    may_prompt: Enable (y/n) questions on warning or error.

  Warning:
    If may_prompt is true, output_stream SHOULD be sys.stdout and input_stream
    SHOULD be sys.stdin.

  Return:
    True if execution can continue, False if not.
  """
    start_time = time.time()
    presubmit_files = ListRelevantPresubmitFiles(
        change.AbsoluteLocalPaths(True), change.RepositoryRoot())
    if not presubmit_files and verbose:
        output_stream.write("Warning, no presubmit.py found.\n")
    results = []
    executer = PresubmitExecuter(change, committing)
    if default_presubmit:
        if verbose:
            output_stream.write("Running default presubmit script.\n")
        # The default script has no real on-disk path; pretend it lives at the
        # repository root so any path-relative logic in it behaves sensibly.
        fake_path = os.path.join(change.RepositoryRoot(), 'PRESUBMIT.py')
        results += executer.ExecPresubmitScript(default_presubmit, fake_path)
    for filename in presubmit_files:
        filename = os.path.abspath(filename)
        if verbose:
            output_stream.write("Running %s\n" % filename)
        # Accept CRLF presubmit script.
        presubmit_script = gclient_utils.FileRead(filename, 'rU')
        results += executer.ExecPresubmitScript(presubmit_script, filename)

    # Bucket the results by severity: fatal -> errors, prompt-worthy ->
    # warnings, everything else -> notifications.
    errors = []
    notifications = []
    warnings = []
    for result in results:
        if not result.IsFatal() and not result.ShouldPrompt():
            notifications.append(result)
        elif result.ShouldPrompt():
            warnings.append(result)
        else:
            errors.append(result)

    error_count = 0
    for name, items in (('Messages', notifications), ('Warnings', warnings),
                        ('ERRORS', errors)):
        if items:
            output_stream.write('** Presubmit %s **\n' % name)
            for item in items:
                # may_prompt=False here on purpose: warnings are prompted for
                # once, collectively, further below rather than one-by-one.
                if not item._Handle(
                        output_stream, input_stream, may_prompt=False):
                    error_count += 1
                output_stream.write('\n')

    total_time = time.time() - start_time
    if total_time > 1.0:
        # BUG FIX: this was a Python 2 print *statement* (a SyntaxError on
        # Python 3); use the print() function like the rest of the file.
        print("Presubmit checks took %.1fs to calculate." % total_time)

    if not errors and warnings and may_prompt:
        if not PromptYesNo(
                input_stream, output_stream, 'There were presubmit warnings. '
                'Are you sure you wish to continue? (y/N): '):
            error_count += 1

    global _ASKED_FOR_FEEDBACK
    # Ask for feedback one time out of 5.
    if (len(results) and random.randint(0, 4) == 0
            and not _ASKED_FOR_FEEDBACK):
        output_stream.write(
            "Was the presubmit check useful? Please send feedback "
            "& hate mail to [email protected]!\n")
        _ASKED_FOR_FEEDBACK = True
    return (error_count == 0)
Пример #30
0
  if change_info.Exists():
    text += (separator2 +
            '\n'.join([f[0] + f[1] for f in affected_files]) + '\n')
  else:
    text += ('\n'.join([f[0] + f[1] for f in affected_files]) + '\n' +
            separator2)
  text += '\n'.join([f[0] + f[1] for f in unaffected_files]) + '\n'

  handle, filename = tempfile.mkstemp(text=True)
  os.write(handle, text)
  os.close(handle)

  if not silent:
    os.system(GetEditor() + " " + filename)

  result = gclient_utils.FileRead(filename, 'r')
  os.remove(filename)

  if not result:
    return 0

  split_result = result.split(separator1, 1)
  if len(split_result) != 2:
    ErrorExit("Don't modify the text starting with ---!\n\n" + result)

  # Update the CL description if it has changed.
  new_description = split_result[0]
  cl_files_text = split_result[1]
  if new_description != description or override_description:
    change_info.description = new_description
    change_info.needs_upload = True