Example #1
  def _DeleteOrMove(self, force):
    """Delete the checkout directory or move it out of the way.

    Args:
        force: bool; if True, delete the directory. Otherwise, just move it.
    """
    if force and os.environ.get('CHROME_HEADLESS') == '1':
      self.Print('_____ Conflicting directory found in %s. Removing.'
                 % self.checkout_path)
      gclient_utils.AddWarning('Conflicting directory %s deleted.'
                               % self.checkout_path)
      gclient_utils.rmtree(self.checkout_path)
    else:
      bad_scm_dir = os.path.join(self._root_dir, '_bad_scm',
                                 os.path.dirname(self.relpath))

      try:
        os.makedirs(bad_scm_dir)
      except OSError as e:
        if e.errno != errno.EEXIST:
          raise

      dest_path = tempfile.mkdtemp(
          prefix=os.path.basename(self.relpath),
          dir=bad_scm_dir)
      self.Print('_____ Conflicting directory found in %s. Moving to %s.'
                 % (self.checkout_path, dest_path))
      gclient_utils.AddWarning('Conflicting directory %s moved to %s.'
                               % (self.checkout_path, dest_path))
      shutil.move(self.checkout_path, dest_path)
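A note on the makedirs guard above: from Python 3.2 onwards, os.makedirs(path, exist_ok=True) replaces the errno.EEXIST dance. A minimal standalone sketch of the same move-aside pattern under that assumption (move_aside is a hypothetical helper, not a depot_tools API):

import os
import shutil
import tempfile

def move_aside(checkout_path, root_dir, relpath):
    # exist_ok=True supersedes the try/except errno.EEXIST check.
    bad_scm_dir = os.path.join(root_dir, '_bad_scm', os.path.dirname(relpath))
    os.makedirs(bad_scm_dir, exist_ok=True)
    # mkdtemp guarantees a fresh, collision-free destination directory.
    dest_path = tempfile.mkdtemp(prefix=os.path.basename(relpath),
                                 dir=bad_scm_dir)
    shutil.move(checkout_path, dest_path)
    return dest_path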
Example #2
    def populate(self, depth=None, shallow=False, bootstrap=False, verbose=False, ignore_lock=False):
        assert self.GetCachePath()
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        lockfile = Lockfile(self.mirror_path)
        if not ignore_lock:
            lockfile.lock()

        tempdir = None
        try:
            tempdir = self._ensure_bootstrapped(depth, bootstrap)
            rundir = tempdir or self.mirror_path
            self._fetch(rundir, verbose, depth)
        except RefsHeadsFailedToFetch:
            # This is a major failure, we need to clean and force a bootstrap.
            gclient_utils.rmtree(rundir)
            self.print(GIT_CACHE_CORRUPT_MESSAGE)
            tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
            assert tempdir
            self._fetch(tempdir or self.mirror_path, verbose, depth)
        finally:
            if tempdir:
                try:
                    if os.path.exists(self.mirror_path):
                        gclient_utils.rmtree(self.mirror_path)
                    os.rename(tempdir, self.mirror_path)
                except OSError as e:
                    # This is somehow racy on Windows.
                    # Catching OSError because WindowsError isn't portable and
                    # pylint complains.
                    self.print("Error moving %s to %s: %s" % (tempdir, self.mirror_path, str(e)))
            if not ignore_lock:
                lockfile.unlock()
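The populate() variants in this listing all share one idea: do the risky fetch or bootstrap work in a temporary directory, then swap it into place, so a failure never leaves a half-built mirror behind. A minimal sketch of that swap using only the standard library (build is a placeholder for the fetch/bootstrap step):

import os
import shutil
import tempfile

def replace_atomically(target, build):
    # Build next to the target so os.rename stays on one filesystem.
    tempdir = tempfile.mkdtemp(prefix='_cache_tmp',
                               dir=os.path.dirname(target))
    try:
        build(tempdir)
    except Exception:
        shutil.rmtree(tempdir, ignore_errors=True)
        raise
    if os.path.exists(target):
        shutil.rmtree(target)
    os.rename(tempdir, target)  # as the snippets note, this can race on Windows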
Example #3
    def _ensure_bootstrapped(self, depth, bootstrap, force=False):
        tempdir = None
        pack_dir = os.path.join(self.mirror_path, "objects", "pack")
        pack_files = []

        if os.path.isdir(pack_dir):
            pack_files = [f for f in os.listdir(pack_dir) if f.endswith(".pack")]

        should_bootstrap = force or not self.exists() or len(pack_files) > GC_AUTOPACKLIMIT
        if should_bootstrap:
            if self.exists():
                # Re-bootstrapping an existing mirror; preserve existing fetch spec.
                self._preserve_fetchspec()
            tempdir = tempfile.mkdtemp(prefix="_cache_tmp", suffix=self.basedir, dir=self.GetCachePath())
            bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)
            if bootstrapped:
                # Bootstrap succeeded; delete previous cache, if any.
                gclient_utils.rmtree(self.mirror_path)
            elif not self.exists():
                # Bootstrap failed, no previous cache; start with a bare git dir.
                self.RunGit(["init", "--bare"], cwd=tempdir)
            else:
                # Bootstrap failed, previous cache exists; warn and continue.
                logging.warning(
                    "Git cache has a lot of pack files (%d).  Tried to re-bootstrap "
                    "but failed.  Continuing with non-optimized repository." % len(pack_files)
                )
                gclient_utils.rmtree(tempdir)
                tempdir = None
        else:
            if depth and os.path.exists(os.path.join(self.mirror_path, "shallow")):
                logging.warn("Shallow fetch requested, but repo cache already exists.")
        return tempdir
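The bootstrap decision above reduces to three conditions: a forced rebuild, a missing mirror, or too many .pack files (each extra pack slows every object lookup). A condensed sketch of just that predicate; GC_AUTOPACKLIMIT is assumed to mirror git's gc.autoPackLimit default of 50, and the config-file check stands in for the snippet's self.exists():

import os

GC_AUTOPACKLIMIT = 50  # assumption: matches git's gc.autoPackLimit default

def should_bootstrap(mirror_path, force=False):
    # Too many pack files slows every object lookup; rebuild from bootstrap.
    pack_dir = os.path.join(mirror_path, 'objects', 'pack')
    pack_files = ([f for f in os.listdir(pack_dir) if f.endswith('.pack')]
                  if os.path.isdir(pack_dir) else [])
    # os.path.exists on 'config' stands in for the snippet's self.exists().
    has_repo = os.path.exists(os.path.join(mirror_path, 'config'))
    return force or not has_repo or len(pack_files) > GC_AUTOPACKLIMIT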
Example #4
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False, ignore_lock=False, lock_timeout=0):
    assert self.GetCachePath()
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    lockfile = Lockfile(self.mirror_path, lock_timeout)
    if not ignore_lock:
      lockfile.lock()

    tempdir = None
    try:
      tempdir = self._ensure_bootstrapped(depth, bootstrap)
      rundir = tempdir or self.mirror_path
      self._fetch(rundir, verbose, depth)
    except ClobberNeeded:
      # This is a major failure, we need to clean and force a bootstrap.
      gclient_utils.rmtree(rundir)
      self.print(GIT_CACHE_CORRUPT_MESSAGE)
      tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
      assert tempdir
      self._fetch(tempdir or self.mirror_path, verbose, depth)
    finally:
      if tempdir:
        if os.path.exists(self.mirror_path):
          gclient_utils.rmtree(self.mirror_path)
        self.Rename(tempdir, self.mirror_path)
      if not ignore_lock:
        lockfile.unlock()
Example #5
    def GenerateDiff(filenames, cwd, full_move, revision):
        """Returns a string containing the diff for the given file list.

    The files in the list should either be absolute paths or relative to the
    given root. If no root directory is provided, the repository root will be
    used.
    The diff will always use relative paths.
    """
        assert isinstance(filenames, (list, tuple))
        # If the user specified a custom diff command in their svn config file,
        # then it'll be used when we do svn diff, which we don't want to happen
        # since we want the unified diff.
        if SVN.AssertVersion("1.7")[0]:
            # On svn >= 1.7, the "--internal-diff" flag will solve this.
            return SVN._GenerateDiffInternal(filenames, cwd, full_move,
                                             revision,
                                             ["diff", "--internal-diff"],
                                             ["diff", "--internal-diff"])
        else:
            # On svn < 1.7, the "--internal-diff" flag doesn't exist.  Using
            # --diff-cmd=diff doesn't always work, since e.g. Windows cmd users may
            # not have a "diff" executable in their path at all.  So we use an empty
            # temporary directory as the config directory, which bypasses any user
            # settings for the diff-cmd.  However, we don't pass this for the
            # remote_safe_diff_command parameter, since when a new config-dir is
            # specified for an svn diff against a remote URL, it triggers
            # authentication prompts.  In this case there isn't really a good
            # alternative to svn 1.7's --internal-diff flag.
            bogus_dir = tempfile.mkdtemp()
            try:
                return SVN._GenerateDiffInternal(
                    filenames, cwd, full_move, revision,
                    ["diff", "--config-dir", bogus_dir], ["diff"])
            finally:
                gclient_utils.rmtree(bogus_dir)
Example #6
  def _ensure_bootstrapped(self, depth, bootstrap, force=False):
    tempdir = None
    config_file = os.path.join(self.mirror_path, 'config')
    pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
    pack_files = []

    if os.path.isdir(pack_dir):
      pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')]

    should_bootstrap = (force or
                        not os.path.exists(config_file) or
                        len(pack_files) > GC_AUTOPACKLIMIT)
    if should_bootstrap:
      tempdir = tempfile.mkdtemp(
          prefix='_cache_tmp', suffix=self.basedir, dir=self.GetCachePath())
      bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)
      if bootstrapped:
        # Bootstrap succeeded; delete previous cache, if any.
        gclient_utils.rmtree(self.mirror_path)
      elif not os.path.exists(config_file):
        # Bootstrap failed, no previous cache; start with a bare git dir.
        self.RunGit(['init', '--bare'], cwd=tempdir)
      else:
        # Bootstrap failed, previous cache exists; warn and continue.
        logging.warning(
            'Git cache has a lot of pack files (%d).  Tried to re-bootstrap '
            'but failed.  Continuing with non-optimized repository.'
            % len(pack_files))
        gclient_utils.rmtree(tempdir)
        tempdir = None
    else:
      if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
        logging.warning(
            'Shallow fetch requested, but repo cache already exists.')
    return tempdir
Example #7
  def nuke(self):
    """Obliterates the git repo on disk.

    Causes this GitRepo to be unusable.
    """
    gclient_utils.rmtree(self.repo_path)
    self.repo_path = None
Example #8
 def _clean():
     """Cleans the root trial directory."""
     if not TrialDir.SHOULD_LEAK:
         logging.debug('Removing %s' % TrialDir.TRIAL_ROOT)
         gclient_utils.rmtree(TrialDir.TRIAL_ROOT)
     else:
         logging.error('Leaking %s' % TrialDir.TRIAL_ROOT)
Example #9
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False, ignore_lock=False, lock_timeout=0,
               reset_fetch_config=False):
    assert self.GetCachePath()
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    lockfile = Lockfile(self.mirror_path, lock_timeout)
    if not ignore_lock:
      lockfile.lock()

    tempdir = None
    try:
      tempdir = self._ensure_bootstrapped(depth, bootstrap)
      rundir = tempdir or self.mirror_path
      self._fetch(rundir, verbose, depth, reset_fetch_config)
    except ClobberNeeded:
      # This is a major failure, we need to clean and force a bootstrap.
      gclient_utils.rmtree(rundir)
      self.print(GIT_CACHE_CORRUPT_MESSAGE)
      tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
      assert tempdir
      self._fetch(tempdir, verbose, depth, reset_fetch_config)
    finally:
      if tempdir:
        if os.path.exists(self.mirror_path):
          gclient_utils.rmtree(self.mirror_path)
        self.Rename(tempdir, self.mirror_path)
      if not ignore_lock:
        lockfile.unlock()
Example #10
    def GenerateDiff(filenames, cwd, full_move, revision):
        """Returns a string containing the diff for the given file list.

    The files in the list should either be absolute paths or relative to the
    given root. If no root directory is provided, the repository root will be
    used.
    The diff will always use relative paths.
    """
        assert isinstance(filenames, (list, tuple))
        # If the user specified a custom diff command in their svn config file,
        # then it'll be used when we do svn diff, which we don't want to happen
        # since we want the unified diff.
        if SVN.AssertVersion("1.7")[0]:
            # On svn >= 1.7, the "--internal-diff" flag will solve this.
            return SVN._GenerateDiffInternal(
                filenames, cwd, full_move, revision, ["diff", "--internal-diff"], ["diff", "--internal-diff"]
            )
        else:
            # On svn < 1.7, the "--internal-diff" flag doesn't exist.  Using
            # --diff-cmd=diff doesn't always work, since e.g. Windows cmd users may
            # not have a "diff" executable in their path at all.  So we use an empty
            # temporary directory as the config directory, which bypasses any user
            # settings for the diff-cmd.  However, we don't pass this for the
            # remote_safe_diff_command parameter, since when a new config-dir is
            # specified for an svn diff against a remote URL, it triggers
            # authentication prompts.  In this case there isn't really a good
            # alternative to svn 1.7's --internal-diff flag.
            bogus_dir = tempfile.mkdtemp()
            try:
                return SVN._GenerateDiffInternal(
                    filenames, cwd, full_move, revision, ["diff", "--config-dir", bogus_dir], ["diff"]
                )
            finally:
                gclient_utils.rmtree(bogus_dir)
Example #11
    def populate(self,
                 depth=None,
                 no_fetch_tags=False,
                 shallow=False,
                 bootstrap=False,
                 verbose=False,
                 lock_timeout=0,
                 reset_fetch_config=False):
        assert self.GetCachePath()
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        with lockfile.lock(self.mirror_path, lock_timeout):
            try:
                self._ensure_bootstrapped(depth, bootstrap, reset_fetch_config)
                self._fetch(self.mirror_path, verbose, depth, no_fetch_tags,
                            reset_fetch_config)
            except ClobberNeeded:
                # This is a major failure, we need to clean and force a bootstrap.
                gclient_utils.rmtree(self.mirror_path)
                self.print(GIT_CACHE_CORRUPT_MESSAGE)
                self._ensure_bootstrapped(depth,
                                          bootstrap,
                                          reset_fetch_config,
                                          force=True)
                self._fetch(self.mirror_path, verbose, depth, no_fetch_tags,
                            reset_fetch_config)
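Here lockfile.lock is used as a context manager rather than the paired lock()/unlock() calls of the earlier variants. A minimal sketch of such a lock built on os.open with O_CREAT | O_EXCL; this is an assumption about the real depot_tools Lockfile, not its actual implementation (the '.lock' suffix matches what UnlockAll cleans up in later examples). With timeout=0 it raises immediately if the lock is already held:

import contextlib
import os
import time

@contextlib.contextmanager
def lock(path, timeout=0):
    lockfile = path + '.lock'
    deadline = time.time() + timeout
    while True:
        try:
            # O_CREAT | O_EXCL fails atomically if the lock file exists.
            fd = os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            break
        except FileExistsError:
            if time.time() >= deadline:
                raise
            time.sleep(0.1)
    try:
        yield
    finally:
        os.close(fd)
        os.remove(lockfile)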
Example #12
  def bootstrap_repo(self, directory):
    """Bootstrap the repo from Google Stroage if possible."""

    python_fallback = False
    if sys.platform.startswith('win') and not self.FindExecutable('7z'):
      python_fallback = True
    elif sys.platform.startswith('darwin'):
      # The OSX version of unzip doesn't support zip64.
      python_fallback = True
    elif not self.FindExecutable('unzip'):
      python_fallback = True

    gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
    gsutil = Gsutil(
        self.gsutil_exe, boto_path=os.devnull, bypass_prodaccess=True)
    # Get the most recent version of the zipfile.
    _, ls_out, _ = gsutil.check_call('ls', gs_folder)
    ls_out_sorted = sorted(ls_out.splitlines())
    if not ls_out_sorted:
      # This repo is not on Google Storage.
      return False
    latest_checkout = ls_out_sorted[-1]

    # Download zip file to a temporary directory. Create it before the try
    # block so the finally clause never sees an unbound name.
    tempdir = tempfile.mkdtemp()
    try:
      self.print('Downloading %s' % latest_checkout)
      code, out, err = gsutil.check_call('cp', latest_checkout, tempdir)
      if code:
        self.print('%s\n%s' % (out, err))
        return False
      filename = os.path.join(tempdir, latest_checkout.split('/')[-1])

      # Unpack the file with 7z on Windows, unzip on Linux, or fall back to zipfile.
      if not python_fallback:
        if sys.platform.startswith('win'):
          cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
        else:
          cmd = ['unzip', filename, '-d', directory]
        retcode = subprocess.call(cmd)
      else:
        try:
          with zipfile.ZipFile(filename, 'r') as f:
            f.printdir()
            f.extractall(directory)
        except Exception as e:
          self.print('Encountered error: %s' % str(e), file=sys.stderr)
          retcode = 1
        else:
          retcode = 0
    finally:
      # Clean up the downloaded zipfile.
      gclient_utils.rmtree(tempdir)

    if retcode:
      self.print(
          'Extracting bootstrap zipfile %s failed.\n'
          'Resuming normal operations.' % filename)
      return False
    return True
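The python_fallback branch above is the portable path when neither 7z nor a zip64-capable unzip is available. Isolated as a self-contained sketch:

import zipfile

def extract_zip(filename, directory):
    # Pure-Python extraction; zipfile handles zip64 archives on all platforms.
    try:
        with zipfile.ZipFile(filename, 'r') as f:
            f.extractall(directory)
    except (zipfile.BadZipFile, OSError):
        return 1  # non-zero, matching the subprocess.call convention
    return 0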
Example #13
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False, ignore_lock=False):
    assert self.GetCachePath()
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    lockfile = Lockfile(self.mirror_path)
    if not ignore_lock:
      lockfile.lock()

    tempdir = None
    try:
      tempdir = self._ensure_bootstrapped(depth, bootstrap)
      rundir = tempdir or self.mirror_path
      self._fetch(rundir, verbose, depth)
    except RefsHeadsFailedToFetch:
      # This is a major failure, we need to clean and force a bootstrap.
      gclient_utils.rmtree(rundir)
      self.print(GIT_CACHE_CORRUPT_MESSAGE)
      tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
      assert tempdir
      self._fetch(tempdir or self.mirror_path, verbose, depth)
    finally:
      if tempdir:
        os.rename(tempdir, self.mirror_path)
      if not ignore_lock:
        lockfile.unlock()
Example #14
 def _clean():
   """Cleans the root trial directory."""
   if not TrialDir.SHOULD_LEAK:
     logging.debug('Removing %s' % TrialDir.TRIAL_ROOT)
     gclient_utils.rmtree(TrialDir.TRIAL_ROOT)
   else:
     logging.error('Leaking %s' % TrialDir.TRIAL_ROOT)
Example #15
  def bootstrap_repo(self, directory):
    """Bootstrap the repo from Google Stroage if possible.

    Requires 7z on Windows and Unzip on Linux/Mac.
    """
    if sys.platform.startswith('win'):
      if not self.FindExecutable('7z'):
        self.print('''
Cannot find 7z in the path.  If you want git cache to be able to bootstrap from
Google Storage, please install 7z from:

http://www.7-zip.org/download.html
''')
        return False
    else:
      if not self.FindExecutable('unzip'):
        self.print('''
Cannot find unzip in the path.  If you want git cache to be able to bootstrap
from Google Storage, please ensure unzip is present on your system.
''')
        return False

    gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
    gsutil = Gsutil(
        self.gsutil_exe, boto_path=os.devnull, bypass_prodaccess=True)
    # Get the most recent version of the zipfile.
    _, ls_out, _ = gsutil.check_call('ls', gs_folder)
    ls_out_sorted = sorted(ls_out.splitlines())
    if not ls_out_sorted:
      # This repo is not on Google Storage.
      return False
    latest_checkout = ls_out_sorted[-1]

    # Download zip file to a temporary directory. Create it before the try
    # block so the finally clause never sees an unbound name.
    tempdir = tempfile.mkdtemp()
    try:
      self.print('Downloading %s' % latest_checkout)
      code, out, err = gsutil.check_call('cp', latest_checkout, tempdir)
      if code:
        self.print('%s\n%s' % (out, err))
        return False
      filename = os.path.join(tempdir, latest_checkout.split('/')[-1])

      # Unpack the file with 7z on Windows, or unzip everywhere else.
      if sys.platform.startswith('win'):
        cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
      else:
        cmd = ['unzip', filename, '-d', directory]
      retcode = subprocess.call(cmd)
    finally:
      # Clean up the downloaded zipfile.
      gclient_utils.rmtree(tempdir)

    if retcode:
      self.print(
          'Extracting bootstrap zipfile %s failed.\n'
          'Resuming normal operations.' % filename)
      return False
    return True
Example #16
    def bootstrap_repo(self, directory):
        """Bootstrap the repo from Google Stroage if possible.

    Requires 7z on Windows and Unzip on Linux/Mac.
    """
        if sys.platform.startswith('win'):
            if not self.FindExecutable('7z'):
                self.print('''
Cannot find 7z in the path.  If you want git cache to be able to bootstrap from
Google Storage, please install 7z from:

http://www.7-zip.org/download.html
''')
                return False
        else:
            if not self.FindExecutable('unzip'):
                self.print('''
Cannot find unzip in the path.  If you want git cache to be able to bootstrap
from Google Storage, please ensure unzip is present on your system.
''')
                return False

        gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
        gsutil = Gsutil(self.gsutil_exe,
                        boto_path=os.devnull,
                        bypass_prodaccess=True)
        # Get the most recent version of the zipfile.
        _, ls_out, _ = gsutil.check_call('ls', gs_folder)
        ls_out_sorted = sorted(ls_out.splitlines())
        if not ls_out_sorted:
            # This repo is not on Google Storage.
            return False
        latest_checkout = ls_out_sorted[-1]

        # Download zip file to a temporary directory. Create it before the
        # try block so the finally clause never sees an unbound name.
        tempdir = tempfile.mkdtemp()
        try:
            self.print('Downloading %s' % latest_checkout)
            code, out, err = gsutil.check_call('cp', latest_checkout, tempdir)
            if code:
                self.print('%s\n%s' % (out, err))
                return False
            filename = os.path.join(tempdir, latest_checkout.split('/')[-1])

            # Unpack the file with 7z on Windows, or unzip everywhere else.
            if sys.platform.startswith('win'):
                cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
            else:
                cmd = ['unzip', filename, '-d', directory]
            retcode = subprocess.call(cmd)
        finally:
            # Clean up the downloaded zipfile.
            gclient_utils.rmtree(tempdir)

        if retcode:
            self.print('Extracting bootstrap zipfile %s failed.\n'
                       'Resuming normal operations.' % filename)
            return False
        return True
Example #17
def tempdir():
  tdir = None
  try:
    tdir = tempfile.mkdtemp(suffix='gerrit_util')
    yield tdir
  finally:
    if tdir:
      gclient_utils.rmtree(tdir)
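The bare yield makes this a generator, so the original is presumably decorated with @contextlib.contextmanager (the decorator line appears to have been lost in extraction). A runnable reconstruction under that assumption, with shutil.rmtree standing in for gclient_utils.rmtree:

import contextlib
import shutil
import tempfile

@contextlib.contextmanager
def tempdir():
    tdir = None
    try:
        tdir = tempfile.mkdtemp(suffix='gerrit_util')
        yield tdir
    finally:
        if tdir:
            shutil.rmtree(tdir, ignore_errors=True)

# Usage: the directory is removed on exit, even if the body raises.
# with tempdir() as tdir:
#     ...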
Example #18
 def tear_down(self):
     """Cleans the trial subdirectory for this instance."""
     if not self.leak:
         logging.debug('Removing %s' % self.root_dir)
         gclient_utils.rmtree(self.root_dir)
     else:
         logging.error('Leaking %s' % self.root_dir)
     self.root_dir = None
Example #19
def tempdir():
    tdir = None
    try:
        tdir = tempfile.mkdtemp(suffix='gerrit_util')
        yield tdir
    finally:
        if tdir:
            gclient_utils.rmtree(tdir)
Example #20
 def tear_down(self):
   """Cleans the trial subdirectory for this instance."""
   if not self.leak:
     logging.debug('Removing %s' % self.root_dir)
     gclient_utils.rmtree(self.root_dir)
   else:
     logging.error('Leaking %s' % self.root_dir)
   self.root_dir = None
Example #21
    def Revert(cwd, callback=None, ignore_externals=False, no_ignore=False):
        """Reverts all svn modifications in cwd, including properties.

    Deletes any modified files or directories.

    A "svn update --revision BASE" call is required afterwards to revive
    deleted files.
    """
        for file_status in SVN.CaptureStatus(None, cwd, no_ignore=no_ignore):
            file_path = os.path.join(cwd, file_status[1])
            if ignore_externals and file_status[0][0] == "X" and file_status[0][1:].isspace():
                # Ignore externals.
                logging.info("Ignoring external %s" % file_status[1])
                continue

            # This is the case where '! L    .' is returned by 'svn status'. Just
            # strip off the '/.'.
            if file_path.endswith(os.path.sep + "."):
                file_path = file_path[:-2]

            if callback:
                callback(file_status)

            if os.path.exists(file_path):
                # svn revert is really stupid. It fails on inconsistent line-endings,
                # on switched directories, etc. So take no chance and delete everything!
                # In theory, it wouldn't be necessary for property-only change but then
                # it'd have to look for switched directories, etc so it's not worth
                # optimizing this use case.
                if os.path.isfile(file_path) or os.path.islink(file_path):
                    logging.info("os.remove(%s)" % file_path)
                    os.remove(file_path)
                elif os.path.isdir(file_path):
                    logging.info("rmtree(%s)" % file_path)
                    gclient_utils.rmtree(file_path)
                else:
                    logging.critical(
                        (
                            "No idea what is %s.\nYou just found a bug in gclient"
                            ", please ping [email protected] ASAP!"
                        )
                        % file_path
                    )

            if file_status[0][0] in ("D", "A", "!") or not file_status[0][1:].isspace():
                # Added or deleted files require manual intervention and require
                # calling revert, as for properties.
                if not os.path.isdir(cwd):
                    # '.' was deleted. It's not worth continuing.
                    return
                try:
                    SVN.Capture(["revert", file_status[1]], cwd=cwd)
                except subprocess2.CalledProcessError:
                    if not os.path.exists(file_path):
                        continue
                    raise
Example #22
 def set_up(self):
   """All late initialization comes here."""
   # You can override self.TRIAL_ROOT.
   if not self.TRIAL_ROOT:
     # Was not yet initialized.
     TrialDir.TRIAL_ROOT = os.path.realpath(tempfile.mkdtemp(prefix='trial'))
     atexit.register(self._clean)
   self.root_dir = os.path.join(TrialDir.TRIAL_ROOT, self.subdir)
   gclient_utils.rmtree(self.root_dir)
   os.makedirs(self.root_dir)
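set_up() lazily creates one shared root for all trials and registers its cleanup with atexit exactly once. The same idiom as a self-contained sketch, with module-level state standing in for the TrialDir class attributes:

import atexit
import os
import shutil
import tempfile

_trial_root = None

def trial_root():
    # First caller creates the root and schedules cleanup at interpreter exit.
    global _trial_root
    if _trial_root is None:
        _trial_root = os.path.realpath(tempfile.mkdtemp(prefix='trial'))
        atexit.register(shutil.rmtree, _trial_root, ignore_errors=True)
    return _trial_root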
Example #23
    def Revert(cwd, callback=None, ignore_externals=False, no_ignore=False):
        """Reverts all svn modifications in cwd, including properties.

    Deletes any modified files or directories.

    A "svn update --revision BASE" call is required afterwards to revive
    deleted files.
    """
        for file_status in SVN.CaptureStatus(None, cwd, no_ignore=no_ignore):
            file_path = os.path.join(cwd, file_status[1])
            if (ignore_externals and file_status[0][0] == 'X'
                    and file_status[0][1:].isspace()):
                # Ignore externals.
                logging.info('Ignoring external %s' % file_status[1])
                continue

            # This is the case where '! L    .' is returned by 'svn status'. Just
            # strip off the '/.'.
            if file_path.endswith(os.path.sep + '.'):
                file_path = file_path[:-2]

            if callback:
                callback(file_status)

            if os.path.exists(file_path):
                # svn revert is really stupid. It fails on inconsistent line-endings,
                # on switched directories, etc. So take no chance and delete everything!
                # In theory, it wouldn't be necessary for property-only change but then
                # it'd have to look for switched directories, etc so it's not worth
                # optimizing this use case.
                if os.path.isfile(file_path) or os.path.islink(file_path):
                    logging.info('os.remove(%s)' % file_path)
                    os.remove(file_path)
                elif os.path.isdir(file_path):
                    logging.info('rmtree(%s)' % file_path)
                    gclient_utils.rmtree(file_path)
                else:
                    logging.critical(
                        ('No idea what is %s.\nYou just found a bug in gclient'
                         ', please ping [email protected] ASAP!') %
                        file_path)

            if (file_status[0][0] in ('D', 'A', '!')
                    or not file_status[0][1:].isspace()):
                # Added or deleted files require manual intervention and require
                # calling revert, as for properties.
                if not os.path.isdir(cwd):
                    # '.' was deleted. It's not worth continuing.
                    return
                try:
                    SVN.Capture(['revert', file_status[1]], cwd=cwd)
                except subprocess2.CalledProcessError:
                    if not os.path.exists(file_path):
                        continue
                    raise
Example #24
    def _ensure_bootstrapped(self,
                             depth,
                             bootstrap,
                             reset_fetch_config,
                             force=False):
        pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
        pack_files = []
        if os.path.isdir(pack_dir):
            pack_files = [
                f for f in os.listdir(pack_dir) if f.endswith('.pack')
            ]
            self.print(
                '%s has %d .pack files, re-bootstrapping if >%d or ==0' %
                (self.mirror_path, len(pack_files), GC_AUTOPACKLIMIT))

        should_bootstrap = (force or not self.exists()
                            or len(pack_files) > GC_AUTOPACKLIMIT
                            or len(pack_files) == 0)

        if not should_bootstrap:
            if depth and os.path.exists(
                    os.path.join(self.mirror_path, 'shallow')):
                logging.warning(
                    'Shallow fetch requested, but repo cache already exists.')
            return

        if not self.exists():
            if os.path.exists(self.mirror_path):
                # If the mirror path exists but self.exists() returns false, we're
                # in an unexpected state. Nuke the previous mirror directory and
                # start fresh.
                gclient_utils.rmtree(self.mirror_path)
            os.mkdir(self.mirror_path)
        elif not reset_fetch_config:
            # Re-bootstrapping an existing mirror; preserve existing fetch spec.
            self._preserve_fetchspec()

        bootstrapped = (not depth and bootstrap
                        and self.bootstrap_repo(self.mirror_path))

        if not bootstrapped:
            if not self.exists() or not self.supported_project():
                # Bootstrap failed due to:
                # 1. No previous cache.
                # 2. Project doesn't have a bootstrap folder.
                # Start with a bare git dir.
                self.RunGit(['init', '--bare', '-b', 'main'],
                            cwd=self.mirror_path)
            else:
                # Bootstrap failed, previous cache exists; warn and continue.
                logging.warning(
                    'Git cache has a lot of pack files (%d). Tried to re-bootstrap '
                    'but failed. Continuing with non-optimized repository.' %
                    len(pack_files))
Example #25
  def revert(self, options, args, file_list):
    """Reverts local modifications. Subversion specific.

    All reverted files will be appended to file_list, even if Subversion
    doesn't know about them.
    """
    if not os.path.isdir(self.checkout_path):
      if os.path.exists(self.checkout_path):
        gclient_utils.rmtree(self.checkout_path)
      # svn revert won't work if the directory doesn't exist. It needs to
      # checkout instead.
      print('\n_____ %s is missing, syncing instead' % self.relpath)
      # Don't reuse the args.
      return self.update(options, [], file_list)

    if not os.path.isdir(os.path.join(self.checkout_path, '.svn')):
      if os.path.isdir(os.path.join(self.checkout_path, '.git')):
        print('________ found .git directory; skipping %s' % self.relpath)
        return
      if os.path.isdir(os.path.join(self.checkout_path, '.hg')):
        print('________ found .hg directory; skipping %s' % self.relpath)
        return
      if not options.force:
        raise gclient_utils.Error('Invalid checkout path, aborting')
      print(
          '\n_____ %s is not a valid svn checkout, syncing instead' %
          self.relpath)
      gclient_utils.rmtree(self.checkout_path)
      # Don't reuse the args.
      return self.update(options, [], file_list)

    def printcb(file_status):
      file_list.append(file_status[1])
      if logging.getLogger().isEnabledFor(logging.INFO):
        logging.info('%s%s' % (file_status[0], file_status[1]))
      else:
        print(os.path.join(self.checkout_path, file_status[1]))
    scm.SVN.Revert(self.checkout_path, callback=printcb)

    # Revert() may delete the directory altogether.
    if not os.path.isdir(self.checkout_path):
      # Don't reuse the args.
      return self.update(options, [], file_list)

    try:
      # svn revert is so broken we don't even use it. Using
      # "svn up --revision BASE" achieve the same effect.
      # file_list will contain duplicates.
      self._RunAndGetFileList(['update', '--revision', 'BASE'], options,
          file_list)
    except OSError as e:
      # Maybe the directory disappeared meanwhile. Do not throw an exception.
      logging.error('Failed to update:\n%s' % str(e))
Example #26
    def populate(self,
                 depth=None,
                 shallow=False,
                 bootstrap=False,
                 verbose=False):
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        v = []
        if verbose:
            v = ['-v', '--progress']

        d = []
        if depth:
            d = ['--depth', str(depth)]

        with Lockfile(self.mirror_path):
            # Setup from scratch if the repo is new or is in a bad state.
            tempdir = None
            if not os.path.exists(os.path.join(self.mirror_path, 'config')):
                gclient_utils.rmtree(self.mirror_path)
                tempdir = tempfile.mkdtemp(suffix=self.basedir,
                                           dir=self.GetCachePath())
                bootstrapped = not depth and bootstrap and self.bootstrap_repo(
                    tempdir)
                if not bootstrapped:
                    self.RunGit(['init', '--bare'], cwd=tempdir)
            else:
                if depth and os.path.exists(
                        os.path.join(self.mirror_path, 'shallow')):
                    logging.warning(
                        'Shallow fetch requested, but repo cache already exists.'
                    )
                d = []

            rundir = tempdir or self.mirror_path
            self.config(rundir)
            fetch_cmd = ['fetch'] + v + d + ['origin']
            fetch_specs = subprocess.check_output(
                [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
                cwd=rundir).strip().splitlines()
            for spec in fetch_specs:
                try:
                    self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True)
                except subprocess.CalledProcessError:
                    logging.warning('Fetch of %s failed' % spec)
            if tempdir:
                os.rename(tempdir, self.mirror_path)
Example #27
    def bootstrap_repo(self, directory):
        """Bootstrap the repo from Google Storage if possible.

    More aptly named bootstrap_repo_from_cloud_if_possible_else_do_nothing().
    """
        if not self.bootstrap_bucket:
            return False

        gsutil = Gsutil(self.gsutil_exe, boto_path=None)

        # Get the most recent version of the directory.
        # This is determined from the most recent version of a .ready file.
        # The .ready file is only uploaded when an entire directory has been
        # uploaded to GS.
        _, ls_out, ls_err = gsutil.check_call('ls', self._gs_path)
        ls_out_set = set(ls_out.strip().splitlines())
        latest_dir = self._GetMostRecentCacheDirectory(ls_out_set)

        if not latest_dir:
            self.print('No bootstrap file for %s found in %s, stderr:\n  %s' %
                       (self.mirror_path, self.bootstrap_bucket, '  '.join(
                           (ls_err or '').splitlines(True))))
            return False

        # Create a new temporary directory locally, before the try block so
        # the except clause never sees an unbound name.
        tempdir = tempfile.mkdtemp(prefix='_cache_tmp',
                                   dir=self.GetCachePath())
        try:
            self.RunGit(['init', '--bare'], cwd=tempdir)
            self.print('Downloading files in %s/* into %s.' %
                       (latest_dir, tempdir))
            with self.print_duration_of('download'):
                code = gsutil.call('-m', 'cp', '-r', latest_dir + "/*",
                                   tempdir)
            if code:
                return False
            # A quick validation that all references are valid.
            self.RunGit(['for-each-ref'], cwd=tempdir)
        except Exception as e:
            self.print('Encountered error: %s' % str(e), file=sys.stderr)
            gclient_utils.rmtree(tempdir)
            return False
        # delete the old directory
        if os.path.exists(directory):
            gclient_utils.rmtree(directory)
        self.Rename(tempdir, directory)
        return True
Example #28
    def _ensure_bootstrapped(self, depth, bootstrap, force=False):
        tempdir = None
        pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
        pack_files = []

        if os.path.isdir(pack_dir):
            pack_files = [
                f for f in os.listdir(pack_dir) if f.endswith('.pack')
            ]
            self.print('%s has %d .pack files, re-bootstrapping if >%d' %
                       (self.mirror_path, len(pack_files), GC_AUTOPACKLIMIT))

        should_bootstrap = (force or not self.exists()
                            or len(pack_files) > GC_AUTOPACKLIMIT)
        if should_bootstrap:
            if self.exists():
                # Re-bootstrapping an existing mirror; preserve existing fetch spec.
                self._preserve_fetchspec()
            tempdir = tempfile.mkdtemp(prefix='_cache_tmp',
                                       suffix=self.basedir,
                                       dir=self.GetCachePath())
            bootstrapped = not depth and bootstrap and self.bootstrap_repo(
                tempdir)
            if bootstrapped:
                # Bootstrap succeeded; delete previous cache, if any.
                gclient_utils.rmtree(self.mirror_path)
            elif not self.exists() or not self.supported_project():
                # Bootstrap failed due to either
                # 1. No previous cache
                # 2. Project doesn't have a bootstrap zip file
                # Start with a bare git dir.
                self.RunGit(['init', '--bare'], cwd=tempdir)
            else:
                # Bootstrap failed, previous cache exists; warn and continue.
                logging.warning(
                    'Git cache has a lot of pack files (%d). Tried to re-bootstrap '
                    'but failed. Continuing with non-optimized repository.' %
                    len(pack_files))
                gclient_utils.rmtree(tempdir)
                tempdir = None
        else:
            if depth and os.path.exists(
                    os.path.join(self.mirror_path, 'shallow')):
                logging.warning(
                    'Shallow fetch requested, but repo cache already exists.')
        return tempdir
Example #29
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False):
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    v = []
    if verbose:
      v = ['-v', '--progress']

    d = []
    if depth:
      d = ['--depth', str(depth)]


    with Lockfile(self.mirror_path):
      # Setup from scratch if the repo is new or is in a bad state.
      tempdir = None
      if not os.path.exists(os.path.join(self.mirror_path, 'config')):
        gclient_utils.rmtree(self.mirror_path)
        tempdir = tempfile.mkdtemp(
            suffix=self.basedir, dir=self.GetCachePath())
        bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)
        if not bootstrapped:
          self.RunGit(['init', '--bare'], cwd=tempdir)
      else:
        if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
          logging.warning(
              'Shallow fetch requested, but repo cache already exists.')
        d = []

      rundir = tempdir or self.mirror_path
      self.config(rundir)
      fetch_cmd = ['fetch'] + v + d + ['origin']
      fetch_specs = subprocess.check_output(
          [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
          cwd=rundir).strip().splitlines()
      for spec in fetch_specs:
        try:
          self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True)
        except subprocess.CalledProcessError:
          logging.warning('Fetch of %s failed' % spec)
      if tempdir:
        os.rename(tempdir, self.mirror_path)
Example #30
 def tear_down_svn(self):
   if self.svnserve:
     logging.debug('Killing svnserve pid %s' % self.svnserve.pid)
     try:
       self.svnserve.kill()
      except OSError as e:
       if e.errno != errno.ESRCH:   # no such process
         raise
     wait_for_port_to_free(self.host, self.svn_port)
     self.svnserve = None
     self.svn_port = None
     self.svn_base = None
     if not self.trial.SHOULD_LEAK:
       logging.debug('Removing %s' % self.svn_repo)
       gclient_utils.rmtree(self.svn_repo)
       logging.debug('Removing %s' % self.svn_checkout)
       gclient_utils.rmtree(self.svn_checkout)
     else:
       return False
Example #31
 def tear_down_svn(self):
     if self.svnserve:
         logging.debug('Killing svnserve pid %s' % self.svnserve.pid)
         try:
             self.svnserve.kill()
          except OSError as e:
             if e.errno != errno.ESRCH:  # no such process
                 raise
         wait_for_port_to_free(self.host, self.svn_port)
         self.svnserve = None
         self.svn_port = None
         self.svn_base = None
         if not self.trial.SHOULD_LEAK:
             logging.debug('Removing %s' % self.svn_repo)
             gclient_utils.rmtree(self.svn_repo)
             logging.debug('Removing %s' % self.svn_checkout)
             gclient_utils.rmtree(self.svn_checkout)
         else:
             return False
Example #32
  def _ensure_bootstrapped(self, depth, bootstrap, force=False):
    tempdir = None
    pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
    pack_files = []

    if os.path.isdir(pack_dir):
      pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')]
      self.print('%s has %d .pack files, re-bootstrapping if >%d' %
                 (self.mirror_path, len(pack_files), GC_AUTOPACKLIMIT))

    should_bootstrap = (force or
                        not self.exists() or
                        len(pack_files) > GC_AUTOPACKLIMIT)
    if should_bootstrap:
      if self.exists():
        # Re-bootstrapping an existing mirror; preserve existing fetch spec.
        self._preserve_fetchspec()
      tempdir = tempfile.mkdtemp(
          prefix='_cache_tmp', suffix=self.basedir, dir=self.GetCachePath())
      bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)
      if bootstrapped:
        # Bootstrap succeeded; delete previous cache, if any.
        gclient_utils.rmtree(self.mirror_path)
      elif not self.exists() or not self.supported_project():
        # Bootstrap failed due to either
        # 1. No previous cache
        # 2. Project doesn't have a bootstrap zip file
        # Start with a bare git dir.
        self.RunGit(['init', '--bare'], cwd=tempdir)
      else:
        # Bootstrap failed, previous cache exists; warn and continue.
        logging.warning(
            'Git cache has a lot of pack files (%d). Tried to re-bootstrap '
            'but failed. Continuing with non-optimized repository.'
            % len(pack_files))
        gclient_utils.rmtree(tempdir)
        tempdir = None
    else:
      if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
        logging.warning(
            'Shallow fetch requested, but repo cache already exists.')
    return tempdir
Example #33
    def populate(self,
                 depth=None,
                 shallow=False,
                 bootstrap=False,
                 verbose=False,
                 ignore_lock=False,
                 lock_timeout=0):
        assert self.GetCachePath()
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        lockfile = Lockfile(self.mirror_path, lock_timeout)
        if not ignore_lock:
            lockfile.lock()

        tempdir = None
        try:
            tempdir = self._ensure_bootstrapped(depth, bootstrap)
            rundir = tempdir or self.mirror_path
            self._fetch(rundir, verbose, depth)
        except RefsHeadsFailedToFetch:
            # This is a major failure, we need to clean and force a bootstrap.
            gclient_utils.rmtree(rundir)
            self.print(GIT_CACHE_CORRUPT_MESSAGE)
            tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
            assert tempdir
            self._fetch(tempdir or self.mirror_path, verbose, depth)
        finally:
            if tempdir:
                try:
                    if os.path.exists(self.mirror_path):
                        gclient_utils.rmtree(self.mirror_path)
                    os.rename(tempdir, self.mirror_path)
                except OSError as e:
                    # This is somehow racy on Windows.
                    # Catching OSError because WindowsError isn't portable and
                    # pylint complains.
                    self.print('Error moving %s to %s: %s' %
                               (tempdir, self.mirror_path, str(e)))
            if not ignore_lock:
                lockfile.unlock()
Example #34
 def tear_down_git(self):
     if self.gitdaemon:
         logging.debug('Killing git-daemon pid %s' % self.gitdaemon.pid)
         self.gitdaemon.kill()
         self.gitdaemon = None
         if self.git_pid_file:
             pid = int(self.git_pid_file.read())
             self.git_pid_file.close()
             logging.debug('Killing git daemon pid %s' % pid)
             subprocess2.kill_pid(pid)
             self.git_pid_file = None
         wait_for_port_to_free(self.host, self.git_port)
         self.git_port = None
         self.git_base = None
         if not self.trial.SHOULD_LEAK:
             logging.debug('Removing %s' % self.git_root)
             gclient_utils.rmtree(self.git_root)
         else:
             return False
     return True
Example #35
 def tear_down_git(self):
   if self.gitdaemon:
     logging.debug('Killing git-daemon pid %s' % self.gitdaemon.pid)
     self.gitdaemon.kill()
     self.gitdaemon = None
     if self.git_pid_file:
       pid = int(self.git_pid_file.read())
       self.git_pid_file.close()
       logging.debug('Killing git daemon pid %s' % pid)
       subprocess2.kill_pid(pid)
       self.git_pid_file = None
     wait_for_port_to_free(self.host, self.git_port)
     self.git_port = None
     self.git_base = None
     if not self.trial.SHOULD_LEAK:
       logging.debug('Removing %s' % self.git_root)
       gclient_utils.rmtree(self.git_root)
     else:
       return False
   return True
Example #36
  def UnlockAll(cls):
    cachepath = cls.GetCachePath()
    if not cachepath:
      return
    dirlist = os.listdir(cachepath)
    repo_dirs = set([os.path.join(cachepath, path) for path in dirlist
                     if os.path.isdir(os.path.join(cachepath, path))])
    for dirent in dirlist:
      if dirent.startswith('_cache_tmp') or dirent.startswith('tmp'):
        gclient_utils.rmtree(os.path.join(cachepath, dirent))
      elif (dirent.endswith('.lock') and
          os.path.isfile(os.path.join(cachepath, dirent))):
        repo_dirs.add(os.path.join(cachepath, dirent[:-5]))

    unlocked_repos = []
    for repo_dir in repo_dirs:
      if cls.BreakLocks(repo_dir):
        unlocked_repos.append(repo_dir)

    return unlocked_repos
Example #37
    def UnlockAll(cls):
        cachepath = cls.GetCachePath()
        if not cachepath:
            return
        dirlist = os.listdir(cachepath)
        repo_dirs = set([
            os.path.join(cachepath, path) for path in dirlist
            if os.path.isdir(os.path.join(cachepath, path))
        ])
        for dirent in dirlist:
            if dirent.startswith('_cache_tmp') or dirent.startswith('tmp'):
                gclient_utils.rmtree(os.path.join(cachepath, dirent))
            elif (dirent.endswith('.lock')
                  and os.path.isfile(os.path.join(cachepath, dirent))):
                repo_dirs.add(os.path.join(cachepath, dirent[:-5]))

        unlocked_repos = []
        for repo_dir in repo_dirs:
            if cls.BreakLocks(repo_dir):
                unlocked_repos.append(repo_dir)

        return unlocked_repos
Example #38
 def tear_down_git(self):
   if self.gitdaemon:
     logging.debug('Killing git-daemon pid %s' % self.gitdaemon.pid)
     self.gitdaemon.kill()
     self.gitdaemon = None
     if self.git_pid_file_name:
       pid = int(open(self.git_pid_file_name).read())
       logging.debug('Killing git daemon pid %s' % pid)
       try:
         subprocess2.kill_pid(pid)
       except OSError as e:
         if e.errno != errno.ESRCH:  # no such process
           raise
       os.remove(self.git_pid_file_name)
       self.git_pid_file_name = None
     wait_for_port_to_free(self.host, self.git_port)
     self.git_port = None
     self.git_base = None
     if not self.trial.SHOULD_LEAK:
       logging.debug('Removing %s' % self.git_root)
       gclient_utils.rmtree(self.git_root)
     else:
       return False
   return True
Example #39
 def tear_down_git(self):
     if self.gitdaemon:
         logging.debug("Killing git-daemon pid %s" % self.gitdaemon.pid)
         self.gitdaemon.kill()
         self.gitdaemon = None
         if self.git_pid_file:
             pid = int(self.git_pid_file.read())
             self.git_pid_file.close()
             logging.debug("Killing git daemon pid %s" % pid)
             try:
                 subprocess2.kill_pid(pid)
             except OSError as e:
                 if e.errno != errno.ESRCH:  # no such process
                     raise
             self.git_pid_file = None
         wait_for_port_to_free(self.host, self.git_port)
         self.git_port = None
         self.git_base = None
         if not self.trial.SHOULD_LEAK:
             logging.debug("Removing %s" % self.git_root)
             gclient_utils.rmtree(self.git_root)
         else:
             return False
     return True
Example #40
def drover(options, args):
    revision = options.revert or options.merge

    # Initialize some variables used below. They can be overwritten by
    # the drover.properties file.
    BASE_URL = "svn://svn.chromium.org/chrome"
    TRUNK_URL = BASE_URL + "/trunk/src"
    BRANCH_URL = BASE_URL + "/branches/$branch/src"
    SKIP_CHECK_WORKING = True
    PROMPT_FOR_AUTHOR = False

    DEFAULT_WORKING = "drover_" + str(revision)
    if options.branch:
        DEFAULT_WORKING += ("_" + options.branch)

    if not isMinimumSVNVersion(1, 5):
        print "You need to use at least SVN version 1.5.x"
        return 1

    # Override the default properties if there is a drover.properties file.
    global file_pattern_
    if os.path.exists("drover.properties"):
        FILE_PATTERN = file_pattern_
        f = open("drover.properties")
        exec(f.read())
        f.close()
        if FILE_PATTERN:
            file_pattern_ = FILE_PATTERN

    if options.revert and options.branch:
        url = BRANCH_URL.replace("$branch", options.branch)
    elif options.merge and options.sbranch:
        url = BRANCH_URL.replace("$branch", options.sbranch)
    else:
        url = TRUNK_URL

    working = options.workdir or DEFAULT_WORKING

    if options.local:
        working = os.getcwd()
        if not inCheckoutRoot(working):
            print "'%s' appears not to be the root of a working copy" % working
            return 1
        if (isSVNDirty() and not prompt(
                "Working copy contains uncommitted files. Continue?")):
            return 1

    command = 'svn log ' + url + " -r " + str(revision) + " -v"
    os.system(command)

    if not (options.revertbot or prompt("Is this the correct revision?")):
        return 0

    if (os.path.exists(working)) and not options.local:
        if not (options.revertbot or SKIP_CHECK_WORKING or prompt(
                "Working directory: '%s' already exists, clobber?" % working)):
            return 0
        gclient_utils.rmtree(working)

    if not options.local:
        os.makedirs(working)
        os.chdir(working)

    if options.merge:
        action = "Merge"
        if not options.local:
            branch_url = BRANCH_URL.replace("$branch", options.branch)
            # Checkout everything but stuff that got added into a new dir
            checkoutRevision(url, revision, branch_url)
        # Merge everything that changed
        mergeRevision(url, revision)
        # "Export" files that were added from the source and add them to branch
        exportRevision(url, revision)
        # Delete directories that were deleted (file deletes are handled in the
        # merge).
        deleteRevision(url, revision)
    elif options.revert:
        action = "Revert"
        if options.branch:
            url = BRANCH_URL.replace("$branch", options.branch)
        checkoutRevision(url, revision, url, True)
        revertRevision(url, revision)
        revertExportRevision(url, revision)

    # Check the base url so we actually find the author who made the change
    if options.auditor:
        author = options.auditor
    else:
        author = getAuthor(url, revision)
        if not author:
            author = getAuthor(TRUNK_URL, revision)

    filename = str(revision) + ".txt"
    out = open(filename, "w")
    out.write(action + " " + str(revision) + " - ")
    out.write(getRevisionLog(url, revision))
    if (author):
        out.write("\nTBR=" + author)
    out.close()

    change_cmd = 'change ' + str(revision) + " " + filename
    if options.revertbot:
        change_cmd += ' --silent'
    runGcl(change_cmd)
    os.unlink(filename)

    if options.local:
        return 0

    print(author)
    print(revision)
    print("gcl upload " + str(revision) +
          " --send_mail --no_presubmit --reviewers=" + author)

    if options.revertbot or prompt("Would you like to upload?"):
        if PROMPT_FOR_AUTHOR:
            author = text_prompt(
                "Enter new author or press enter to accept default", author)
        if options.revertbot and options.revertbot_reviewers:
            author += ","
            author += options.revertbot_reviewers
        gclUpload(revision, author)
    else:
        print "Deleting the changelist."
        print "gcl delete " + str(revision)
        runGcl("delete " + str(revision))
        return 0

    # We commit if the revertbot is set to commit automatically, or if this is
    # not the revertbot and the user agrees.
    if options.revertbot_commit or (not options.revertbot
                                    and prompt("Would you like to commit?")):
        print "gcl commit " + str(revision) + " --no_presubmit --force"
        return runGcl("commit " + str(revision) + " --no_presubmit --force")
    else:
        return 0
Example #41
    def update(self, options, args, file_list):
        """Runs svn to update or transparently checkout the working copy.

    All updated files will be appended to file_list.

    Raises:
      Error: if the URL for a relative path can't be determined.
    """
        # Only update if git or hg is not controlling the directory.
        git_path = os.path.join(self.checkout_path, '.git')
        if os.path.exists(git_path):
            print('________ found .git directory; skipping %s' % self.relpath)
            return

        hg_path = os.path.join(self.checkout_path, '.hg')
        if os.path.exists(hg_path):
            print('________ found .hg directory; skipping %s' % self.relpath)
            return

        if args:
            raise gclient_utils.Error("Unsupported argument(s): %s" %
                                      ",".join(args))

        # revision is the revision to match. It is None if no revision is specified,
        # i.e. the 'deps ain't pinned'.
        url, revision = gclient_utils.SplitUrlRevision(self.url)
        # Keep the original unpinned url for reference in case the repo is switched.
        base_url = url
        managed = True
        if options.revision:
            # Override the revision number.
            revision = str(options.revision)
        if revision:
            if revision != 'unmanaged':
                forced_revision = True
                # Reconstruct the url.
                url = '%s@%s' % (url, revision)
                rev_str = ' at %s' % revision
            else:
                managed = False
                revision = None
        else:
            forced_revision = False
            rev_str = ''

        if not os.path.exists(self.checkout_path):
            # We need to checkout.
            command = ['checkout', url, self.checkout_path]
            command = self._AddAdditionalUpdateFlags(command, options,
                                                     revision)
            self._RunAndGetFileList(command, options, file_list,
                                    self._root_dir)
            return

        if not managed:
            print('________ unmanaged solution; skipping %s' % self.relpath)
            return

        # Get the existing scm url and the revision number of the current checkout.
        try:
            from_info = scm.SVN.CaptureInfo(
                os.path.join(self.checkout_path, '.'))
        except (gclient_utils.Error, subprocess2.CalledProcessError):
            raise gclient_utils.Error((
                'Can\'t update/checkout %s if an unversioned directory is present. '
                'Delete the directory and try again.') % self.checkout_path)

        if 'URL' not in from_info:
            raise gclient_utils.Error(
                ('gclient is confused. Couldn\'t get the url for %s.\n'
                 'Try using @unmanaged.\n%s') %
                (self.checkout_path, from_info))

        # Look for locked directories.
        dir_info = scm.SVN.CaptureStatus(os.path.join(self.checkout_path, '.'))
        if any(d[0][2] == 'L' for d in dir_info):
            try:
                self._Run(['cleanup', self.checkout_path], options)
            except subprocess2.CalledProcessError as e:
                # Get the status again, svn cleanup may have cleaned up at least
                # something.
                dir_info = scm.SVN.CaptureStatus(
                    os.path.join(self.checkout_path, '.'))

                # Try to fix the failures by removing troublesome files.
                for d in dir_info:
                    if d[0][2] == 'L':
                        if d[0][0] == '!' and options.force:
                            print 'Removing troublesome path %s' % d[1]
                            gclient_utils.rmtree(d[1])
                        else:
                            print 'Not removing troublesome path %s automatically.' % d[1]
                            if d[0][0] == '!':
                                print 'You can pass --force to enable automatic removal.'
                            raise e
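
The 'L' check above reads the third status column that scm.SVN.CaptureStatus returns for each entry as (status_columns, path). For illustration, the same check against raw `svn status` output (column offsets follow the status pattern documented in RunAndGetFileList later in this file; treat this parsing as a sketch, not the real CaptureStatus):

import subprocess

def find_locked_paths(checkout_path):
  # Columns of `svn status`: file status, property status, lock status, ...
  # so index 2 being 'L' marks a locked path; the path itself starts after
  # the status columns and a separating space.
  output = subprocess.check_output(['svn', 'status', checkout_path])
  locked = []
  for line in output.splitlines():
    if len(line) > 7 and line[2] == 'L':
      locked.append(line[7:].strip())
  return locked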
Example No. 42
    def bootstrap_repo(self, directory):
        """Bootstrap the repo from Google Stroage if possible.

    More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing().
    """

        python_fallback = False
        if sys.platform.startswith('win') and not self.FindExecutable('7z'):
            python_fallback = True
        elif sys.platform.startswith('darwin'):
            # The OSX version of unzip doesn't support zip64.
            python_fallback = True
        elif not self.FindExecutable('unzip'):
            python_fallback = True

        gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
        gsutil = Gsutil(self.gsutil_exe,
                        boto_path=None,
                        bypass_prodaccess=True)
        # Get the most recent version of the zipfile.
        _, ls_out, _ = gsutil.check_call('ls', gs_folder)
        ls_out_sorted = sorted(ls_out.splitlines())
        if not ls_out_sorted:
            # This repo is not on Google Storage.
            return False
        latest_checkout = ls_out_sorted[-1]

        # Download the zip file to a temporary directory. Create the tempdir
        # before entering the try block so the finally clause below can always
        # remove it safely.
        tempdir = tempfile.mkdtemp(prefix='_cache_tmp',
                                   dir=self.GetCachePath())
        try:
            self.print('Downloading %s' % latest_checkout)
            code = gsutil.call('cp', latest_checkout, tempdir)
            if code:
                return False
            filename = os.path.join(tempdir, latest_checkout.split('/')[-1])

            # Unpack with 7z on Windows, unzip elsewhere, or fall back to zipfile.
            if not python_fallback:
                if sys.platform.startswith('win'):
                    cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
                else:
                    cmd = ['unzip', filename, '-d', directory]
                retcode = subprocess.call(cmd)
            else:
                try:
                    with zipfile.ZipFile(filename, 'r') as f:
                        f.printdir()
                        f.extractall(directory)
                except Exception as e:
                    self.print('Encountered error: %s' % str(e),
                               file=sys.stderr)
                    retcode = 1
                else:
                    retcode = 0
        finally:
            # Clean up the downloaded zipfile.
            gclient_utils.rmtree(tempdir)

        if retcode:
            self.print('Extracting bootstrap zipfile %s failed.\n'
                       'Resuming normal operations.' % filename)
            return False
        return True
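
The python_fallback branch above exists because the platform tools are unreliable: some Windows bots lack 7z, and the OS X unzip predates zip64. The portable fallback as a self-contained sketch (standard library only; not the class method itself):

import zipfile

def extract_zip(filename, directory):
  """Extract a zip archive with zipfile; return True on success.

  zipfile understands zip64 archives, which is what makes it a safe
  fallback when 7z or a zip64-capable unzip is unavailable.
  """
  try:
    with zipfile.ZipFile(filename, 'r') as f:
      f.extractall(directory)
  except Exception as e:
    print 'Encountered error: %s' % str(e)
    return False
  return True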
Example No. 43
def drover(options, args):
  revision = options.revert or options.merge

  # Initialize some variables used below. They can be overwritten by
  # the drover.properties file.
  BASE_URL = "svn://svn.chromium.org/chrome"
  TRUNK_URL = BASE_URL + "/trunk/src"
  BRANCH_URL = BASE_URL + "/branches/$branch/src"
  SKIP_CHECK_WORKING = True
  PROMPT_FOR_AUTHOR = False

  # Translate a given milestone to the appropriate branch number.
  if options.milestone:
    options.branch = getBranchForMilestone(options.milestone)
    if not options.branch:
      return 1

  DEFAULT_WORKING = "drover_" + str(revision)
  if options.branch:
    DEFAULT_WORKING += ("_" + options.branch)

  if not isMinimumSVNVersion(1, 5):
    print "You need to use at least SVN version 1.5.x"
    return 1

  # Override the default properties if there is a drover.properties file.
  global file_pattern_
  if os.path.exists("drover.properties"):
    FILE_PATTERN = file_pattern_
    f = open("drover.properties")
    exec(f)
    f.close()
    if FILE_PATTERN:
      file_pattern_ = FILE_PATTERN

  if options.revert and options.branch:
    url = BRANCH_URL.replace("$branch", options.branch)
  elif options.merge and options.sbranch:
    url = BRANCH_URL.replace("$branch", options.sbranch)
  else:
    url = TRUNK_URL

  working = options.workdir or DEFAULT_WORKING

  if options.local:
    working = os.getcwd()
    if not inCheckoutRoot(working):
      print "'%s' appears not to be the root of a working copy" % working
      return 1
    if (isSVNDirty() and not
        prompt("Working copy contains uncommitted files. Continue?")):
      return 1

  command = 'svn log ' + url + " -r " + str(revision) + " -v"
  os.system(command)

  if not (options.revertbot or prompt("Is this the correct revision?")):
    return 0

  if (os.path.exists(working)) and not options.local:
    if not (options.revertbot or SKIP_CHECK_WORKING or
        prompt("Working directory: '%s' already exists, clobber?" % working)):
      return 0
    gclient_utils.rmtree(working)

  if not options.local:
    os.makedirs(working)
    os.chdir(working)

  if options.merge:
    action = "Merge"
    if not options.local:
      branch_url = BRANCH_URL.replace("$branch", options.branch)
      # Checkout everything but stuff that got added into a new dir
      checkoutRevision(url, revision, branch_url)
    # Merge everything that changed
    mergeRevision(url, revision)
    # "Export" files that were added from the source and add them to branch
    exportRevision(url, revision)
    # Delete directories that were deleted (file deletes are handled in the
    # merge).
    deleteRevision(url, revision)
  elif options.revert:
    action = "Revert"
    if options.branch:
      url = BRANCH_URL.replace("$branch", options.branch)
    checkoutRevision(url, revision, url, True)
    revertRevision(url, revision)
    revertExportRevision(url, revision)

  # Check the base url so we actually find the author who made the change
  if options.auditor:
    author = options.auditor
  else:
    author = getAuthor(url, revision)
    if not author:
      author = getAuthor(TRUNK_URL, revision)

  filename = str(revision)+".txt"
  out = open(filename,"w")
  out.write(action +" " + str(revision) + " - ")
  out.write(getRevisionLog(url, revision))
  if author:
    out.write("\nTBR=" + author)
  out.close()

  change_cmd = 'change ' + str(revision) + " " + filename
  if options.revertbot:
    if sys.platform == 'win32':
      os.environ['SVN_EDITOR'] = 'cmd.exe /c exit'
    else:
      os.environ['SVN_EDITOR'] = 'true'
  runGcl(change_cmd)
  os.unlink(filename)

  if options.local:
    return 0

  print author
  print revision
  print ("gcl upload " + str(revision) +
         " --send_mail --no_presubmit --reviewers=" + author)

  if options.revertbot or prompt("Would you like to upload?"):
    if PROMPT_FOR_AUTHOR:
      author = text_prompt("Enter new author or press enter to accept default",
                           author)
    if options.revertbot and options.revertbot_reviewers:
      author += ","
      author += options.revertbot_reviewers
    gclUpload(revision, author)
  else:
    print "Deleting the changelist."
    print "gcl delete " + str(revision)
    runGcl("delete " + str(revision))
    return 0

  # We commit if the revertbot is set to commit automatically, or if this is
  # not the revertbot and the user agrees.
  if options.revertbot_commit or (not options.revertbot and
                                  prompt("Would you like to commit?")):
    print "gcl commit " + str(revision) + " --no_presubmit --force"
    return runGcl("commit " + str(revision) + " --no_presubmit --force")
  else:
    return 0
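
The drover.properties hook above works by exec'ing the file in the current scope, so plain Python assignments in it silently replace the defaults initialized at the top of drover(). A hypothetical properties file retargeting the script might contain nothing more than:

# drover.properties -- read via exec(f), so this is ordinary Python:
BASE_URL = "svn://svn.example.org/project"
TRUNK_URL = BASE_URL + "/trunk/src"
BRANCH_URL = BASE_URL + "/branches/$branch/src"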
Example No. 44
def drover(options, args):
  revision = options.revert or options.merge

  # Initialize some variables used below. They can be overwritten by
  # the drover.properties file.
  BASE_URL = "svn://svn.chromium.org/chrome"
  REVERT_ALT_URLS = ['svn://svn.chromium.org/blink',
                     'svn://svn.chromium.org/chrome-internal',
                     'svn://svn.chromium.org/native_client']
  TRUNK_URL = BASE_URL + "/trunk/src"
  BRANCH_URL = BASE_URL + "/branches/$branch/src"
  SKIP_CHECK_WORKING = True
  PROMPT_FOR_AUTHOR = False
  NO_ALT_URLS = options.no_alt_urls

  DEFAULT_WORKING = "drover_" + str(revision)
  if options.branch:
    DEFAULT_WORKING += ("_" + options.branch)

  if not isMinimumSVNVersion(1, 5):
    print "You need to use at least SVN version 1.5.x"
    return 1

  # Override the default properties if there is a drover.properties file.
  global file_pattern_
  if os.path.exists("drover.properties"):
    print 'Using options from %s' % os.path.join(
        os.getcwd(), 'drover.properties')
    FILE_PATTERN = file_pattern_
    f = open("drover.properties")
    exec(f)
    f.close()
    if FILE_PATTERN:
      file_pattern_ = FILE_PATTERN
    NO_ALT_URLS = True

  if options.revert and options.branch:
    print 'Note: --branch is usually not needed for reverts.'
    url = BRANCH_URL.replace("$branch", options.branch)
  elif options.merge and options.sbranch:
    url = BRANCH_URL.replace("$branch", options.sbranch)
  elif options.revert:
    url = options.url or BASE_URL
    file_pattern_ = r"[ ]+([MADUC])[ ]+((/.*)/(.*))"
  else:
    url = TRUNK_URL

  working = options.workdir or DEFAULT_WORKING

  if options.local:
    working = os.getcwd()
    if not inCheckoutRoot(working):
      print "'%s' appears not to be the root of a working copy" % working
      return 1
    if (isSVNDirty() and not
        prompt("Working copy contains uncommitted files. Continue?")):
      return 1

  if options.revert and not NO_ALT_URLS and not options.url:
    for cur_url in [url] + REVERT_ALT_URLS:
      try:
        commit_date_str = getSVNInfo(
            cur_url, options.revert).get('Last Changed Date', 'x').split()[0]
        commit_date = datetime.datetime.strptime(commit_date_str, '%Y-%m-%d')
        if (datetime.datetime.now() - commit_date).days < 180:
          if cur_url != url:
            print 'Guessing svn repo: %s.' % cur_url,
            print 'Use --no-alt-urls to disable heuristic.'
            url = cur_url
          break
      except ValueError:
        pass
  command = 'svn log ' + url + " -r " + str(revision) + " -v"
  os.system(command)

  if not (options.revertbot or prompt("Is this the correct revision?")):
    return 0

  if (os.path.exists(working)) and not options.local:
    if not (options.revertbot or SKIP_CHECK_WORKING or
        prompt("Working directory: '%s' already exists, clobber?" % working)):
      return 0
    gclient_utils.rmtree(working)

  if not options.local:
    os.makedirs(working)
    os.chdir(working)

  if options.merge:
    action = "Merge"
    if not options.local:
      branch_url = BRANCH_URL.replace("$branch", options.branch)
      # Checkout everything but stuff that got added into a new dir
      checkoutRevision(url, revision, branch_url)
    # Merge everything that changed
    mergeRevision(url, revision)
    # "Export" files that were added from the source and add them to branch
    exportRevision(url, revision)
    # Delete directories that were deleted (file deletes are handled in the
    # merge).
    deleteRevision(url, revision)
  elif options.revert:
    action = "Revert"
    pop_em = not options.url
    checkoutRevision(url, revision, url, True, pop_em)
    revertRevision(url, revision)
    revertExportRevision(url, revision)

  # Check the base url so we actually find the author who made the change
  if options.auditor:
    author = options.auditor
  else:
    author = getAuthor(url, revision)
    if not author:
      author = getAuthor(TRUNK_URL, revision)

  # Check that the author of the CL is different from the user making
  # the revert.  If they're the same, then we'll want to prompt the user
  # for a different reviewer to TBR.
  current_users = getCurrentSVNUsers(BASE_URL)
  is_self_revert = options.revert and author in current_users

  filename = str(revision)+".txt"
  out = open(filename,"w")
  drover_title = '%s %s' % (action, revision)
  revision_log = getRevisionLog(url, revision).splitlines()
  if revision_log:
    commit_title = revision_log[0]
    # Limit title to 68 chars so git log --oneline is <80 chars.
    max_commit_title = 68 - (len(drover_title) + 3)
    if len(commit_title) > max_commit_title:
      commit_title = commit_title[:max_commit_title-3] + '...'
    drover_title += ' "%s"' % commit_title
  out.write(drover_title + '\n\n')
  for line in revision_log:
    out.write('> %s\n' % line)
  if author:
    out.write("\nTBR=" + author)
  out.close()

  change_cmd = 'change ' + str(revision) + " " + filename
  if options.revertbot:
    if sys.platform == 'win32':
      os.environ['SVN_EDITOR'] = 'cmd.exe /c exit'
    else:
      os.environ['SVN_EDITOR'] = 'true'
  runGcl(change_cmd)
  os.unlink(filename)

  if options.local:
    return 0

  print author
  print revision
  print ("gcl upload " + str(revision) +
         " --send_mail --no_presubmit --reviewers=" + author)

  if options.revertbot or prompt("Would you like to upload?"):
    if PROMPT_FOR_AUTHOR or is_self_revert:
      author = text_prompt("Enter new author or press enter to accept default",
                           author)
    if options.revertbot and options.revertbot_reviewers:
      author += ","
      author += options.revertbot_reviewers
    gclUpload(revision, author)
  else:
    print "Deleting the changelist."
    print "gcl delete " + str(revision)
    runGcl("delete " + str(revision))
    return 0

  # We commit if the revertbot is set to commit automatically, or if this is
  # not the revertbot and the user agrees.
  if options.revertbot_commit or (not options.revertbot and
                                  prompt("Would you like to commit?")):
    print "gcl commit " + str(revision) + " --no_presubmit --force"
    return runGcl("commit " + str(revision) + " --no_presubmit --force")
  else:
    return 0
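
The alt-URL loop above accepts a candidate repo only when the revision's 'Last Changed Date' is under 180 days old, on the theory that a stale match is probably the wrong repo. The freshness test in isolation (a sketch that takes the dict produced by drover's getSVNInfo wrapper):

import datetime

def is_recent_commit(svn_info, max_age_days=180):
  # svn reports 'Last Changed Date' like '2013-04-12 10:11:12 -0700 (...)';
  # only the leading date token matters at day granularity. The 'x' default
  # makes strptime fail, mirroring the `except ValueError` in the loop above.
  date_str = svn_info.get('Last Changed Date', 'x').split()[0]
  try:
    commit_date = datetime.datetime.strptime(date_str, '%Y-%m-%d')
  except ValueError:
    return False
  return (datetime.datetime.now() - commit_date).days < max_age_days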
Example No. 45
  def update(self, options, args, file_list):
    """Runs git to update or transparently checkout the working copy.

    All updated files will be appended to file_list.

    Raises:
      Error: if can't get URL for relative path.
    """
    if args:
      raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args))

    self._CheckMinVersion("1.6.6")

    # If a dependency is not pinned, track the default remote branch.
    default_rev = 'refs/remotes/%s/master' % self.remote
    url, deps_revision = gclient_utils.SplitUrlRevision(self.url)
    revision = deps_revision
    managed = True
    if options.revision:
      # Override the revision number.
      revision = str(options.revision)
    if revision == 'unmanaged':
      # Check again for a revision in case an initial ref was specified
      # in the url, for example bla.git@refs/heads/custombranch
      revision = deps_revision
      managed = False
    if not revision:
      revision = default_rev

    if managed:
      self._DisableHooks()

    printed_path = False
    verbose = []
    if options.verbose:
      self.Print('_____ %s at %s' % (self.relpath, revision), timestamp=False)
      verbose = ['--verbose']
      printed_path = True

    remote_ref = scm.GIT.RefToRemoteRef(revision, self.remote)
    if remote_ref:
      # Rewrite remote refs to their local equivalents.
      revision = ''.join(remote_ref)
      rev_type = "branch"
    elif revision.startswith('refs/'):
      # Local branch? We probably don't want to support this, since DEPS
      # should always specify branches as they are in the upstream repo.
      rev_type = "branch"
    else:
      # A hash may also be a tag; only make the distinction at checkout.
      rev_type = "hash"

    mirror = self._GetMirror(url, options)
    if mirror:
      url = mirror.mirror_path

    # If we are going to introduce a new project, there is a possibility that
    # we are syncing back to a state where the project was originally a
    # sub-project rolled by DEPS (realistic case: crossing the Blink merge point
    # syncing backwards, when Blink was a DEPS entry and not part of src.git).
    # In such case, we might have a backup of the former .git folder, which can
    # be used to avoid re-fetching the entire repo again (useful for bisects).
    backup_dir = self.GetGitBackupDirPath()
    target_dir = os.path.join(self.checkout_path, '.git')
    if os.path.exists(backup_dir) and not os.path.exists(target_dir):
      gclient_utils.safe_makedirs(self.checkout_path)
      os.rename(backup_dir, target_dir)
      # Reset to a clean state
      self._Scrub('HEAD', options)

    if (not os.path.exists(self.checkout_path) or
        (os.path.isdir(self.checkout_path) and
         not os.path.exists(os.path.join(self.checkout_path, '.git')))):
      if mirror:
        self._UpdateMirrorIfNotContains(mirror, options, rev_type, revision)
      try:
        self._Clone(revision, url, options)
      except subprocess2.CalledProcessError:
        self._DeleteOrMove(options.force)
        self._Clone(revision, url, options)
      if file_list is not None:
        files = self._Capture(['ls-files']).splitlines()
        file_list.extend([os.path.join(self.checkout_path, f) for f in files])
      if not verbose:
        # Make the output a little prettier. It's nice to have some whitespace
        # between projects when cloning.
        self.Print('')
      return self._Capture(['rev-parse', '--verify', 'HEAD'])

    if not managed:
      self._UpdateBranchHeads(options, fetch=False)
      self.Print('________ unmanaged solution; skipping %s' % self.relpath)
      return self._Capture(['rev-parse', '--verify', 'HEAD'])

    self._maybe_break_locks(options)

    if mirror:
      self._UpdateMirrorIfNotContains(mirror, options, rev_type, revision)

    # See if the url has changed (the unittests use git://foo for the url, let
    # that through).
    current_url = self._Capture(['config', 'remote.%s.url' % self.remote])
    return_early = False
    # TODO(maruel): Delete url != 'git://foo' since it's just to make the
    # unit test pass. (and update the comment above)
    # Skip url auto-correction if remote.origin.gclient-auto-fix-url is set.
    # This allows devs to use experimental repos which have a different url
    # but whose branch(s) are the same as official repos.
    if (current_url.rstrip('/') != url.rstrip('/') and
        url != 'git://foo' and
        subprocess2.capture(
            ['git', 'config', 'remote.%s.gclient-auto-fix-url' % self.remote],
            cwd=self.checkout_path).strip() != 'False'):
      self.Print('_____ switching %s to a new upstream' % self.relpath)
      if not (options.force or options.reset):
        # Make sure it's clean
        self._CheckClean(revision)
      # Switch over to the new upstream
      self._Run(['remote', 'set-url', self.remote, url], options)
      if mirror:
        with open(os.path.join(
            self.checkout_path, '.git', 'objects', 'info', 'alternates'),
            'w') as fh:
          fh.write(os.path.join(url, 'objects'))
      self._EnsureValidHeadObjectOrCheckout(revision, options, url)
      self._FetchAndReset(revision, file_list, options)

      return_early = True
    else:
      self._EnsureValidHeadObjectOrCheckout(revision, options, url)

    if return_early:
      return self._Capture(['rev-parse', '--verify', 'HEAD'])

    cur_branch = self._GetCurrentBranch()

    # Cases:
    # 0) HEAD is detached. Probably from our initial clone.
    #   - make sure HEAD is contained by a named ref, then update.
    # Cases 1-4. HEAD is a branch.
    # 1) current branch is not tracking a remote branch
    #   - try to rebase onto the new hash or branch
    # 2) current branch is tracking a remote branch with local committed
    #    changes, but the DEPS file switched to point to a hash
    #   - rebase those changes on top of the hash
    # 3) current branch is tracking a remote branch w/or w/out changes, and
    #    no DEPS switch
    #   - see if we can FF, if not, prompt the user for rebase, merge, or stop
    # 4) current branch is tracking a remote branch, but DEPS switches to a
    #    different remote branch, and
    #   a) current branch has no local changes, and --force:
    #      - checkout new branch
    #   b) current branch has local changes, and --force and --reset:
    #      - checkout new branch
    #   c) otherwise exit

    # GetUpstreamBranch returns something like 'refs/remotes/origin/master'
    # for a tracking branch, 'master' if not a tracking branch (it's based on
    # a specific rev/hash), or None if it couldn't find an upstream branch.
    if cur_branch is None:
      upstream_branch = None
      current_type = "detached"
      logging.debug("Detached HEAD")
    else:
      upstream_branch = scm.GIT.GetUpstreamBranch(self.checkout_path)
      if not upstream_branch or not upstream_branch.startswith('refs/remotes'):
        current_type = "hash"
        logging.debug("Current branch is not tracking an upstream (remote)"
                      " branch.")
      elif upstream_branch.startswith('refs/remotes'):
        current_type = "branch"
      else:
        raise gclient_utils.Error('Invalid Upstream: %s' % upstream_branch)

    if not scm.GIT.IsValidRevision(self.checkout_path, revision, sha_only=True):
      # Update the remotes first so we have all the refs.
      remote_output = scm.GIT.Capture(['remote'] + verbose + ['update'],
                                      cwd=self.checkout_path)
      if verbose:
        self.Print(remote_output)

    self._UpdateBranchHeads(options, fetch=True)

    revision = self._AutoFetchRef(options, revision)

    # This is a big hammer, debatable if it should even be here...
    if options.force or options.reset:
      target = 'HEAD'
      if options.upstream and upstream_branch:
        target = upstream_branch
      self._Scrub(target, options)

    if current_type == 'detached':
      # case 0
      # We just did a Scrub, this is as clean as it's going to get. In
      # particular if HEAD is a commit that contains two versions of the same
      # file on a case-insensitive filesystem (e.g. 'a' and 'A'), there's no way
      # to actually "Clean" the checkout; that commit is uncheckoutable on this
      # system. The best we can do is carry forward to the checkout step.
      if not (options.force or options.reset):
        self._CheckClean(revision)
      self._CheckDetachedHead(revision, options)
      if self._Capture(['rev-list', '-n', '1', 'HEAD']) == revision:
        self.Print('Up-to-date; skipping checkout.')
      else:
        # 'git checkout' may need to overwrite existing untracked files. Allow
        # it only when nuclear options are enabled.
        self._Checkout(
            options,
            revision,
            force=(options.force and options.delete_unversioned_trees),
            quiet=True,
        )
      if not printed_path:
        self.Print('_____ %s at %s' % (self.relpath, revision), timestamp=False)
    elif current_type == 'hash':
      # case 1
      # Can't find a merge-base since we don't know our upstream. That makes
      # this command VERY likely to produce a rebase failure. For now we
      # assume origin is our upstream since that's what the old behavior was.
      upstream_branch = self.remote
      if options.revision or deps_revision:
        upstream_branch = revision
      self._AttemptRebase(upstream_branch, file_list, options,
                          printed_path=printed_path, merge=options.merge)
      printed_path = True
    elif rev_type == 'hash':
      # case 2
      self._AttemptRebase(upstream_branch, file_list, options,
                          newbase=revision, printed_path=printed_path,
                          merge=options.merge)
      printed_path = True
    elif remote_ref and ''.join(remote_ref) != upstream_branch:
      # case 4
      new_base = ''.join(remote_ref)
      if not printed_path:
        self.Print('_____ %s at %s' % (self.relpath, revision), timestamp=False)
      switch_error = ("Could not switch upstream branch from %s to %s\n"
                     % (upstream_branch, new_base) +
                     "Please use --force or merge or rebase manually:\n" +
                     "cd %s; git rebase %s\n" % (self.checkout_path, new_base) +
                     "OR git checkout -b <some new branch> %s" % new_base)
      force_switch = False
      if options.force:
        try:
          self._CheckClean(revision)
          # case 4a
          force_switch = True
        except gclient_utils.Error as e:
          if options.reset:
            # case 4b
            force_switch = True
          else:
            switch_error = '%s\n%s' % (e.message, switch_error)
      if force_switch:
        self.Print("Switching upstream branch from %s to %s" %
                   (upstream_branch, new_base))
        switch_branch = 'gclient_' + remote_ref[1]
        self._Capture(['branch', '-f', switch_branch, new_base])
        self._Checkout(options, switch_branch, force=True, quiet=True)
      else:
        # case 4c
        raise gclient_utils.Error(switch_error)
    else:
      # case 3 - the default case
      rebase_files = self._Capture(
          ['diff', upstream_branch, '--name-only']).split()
      if verbose:
        self.Print('Trying fast-forward merge to branch : %s' % upstream_branch)
      try:
        merge_args = ['merge']
        if options.merge:
          merge_args.append('--ff')
        else:
          merge_args.append('--ff-only')
        merge_args.append(upstream_branch)
        merge_output = self._Capture(merge_args)
      except subprocess2.CalledProcessError as e:
        rebase_files = []
        if re.match('fatal: Not possible to fast-forward, aborting.', e.stderr):
          if not printed_path:
            self.Print('_____ %s at %s' % (self.relpath, revision),
                       timestamp=False)
            printed_path = True
          while True:
            if not options.auto_rebase:
              try:
                action = self._AskForData(
                    'Cannot %s, attempt to rebase? '
                    '(y)es / (q)uit / (s)kip : ' %
                        ('merge' if options.merge else 'fast-forward merge'),
                    options)
              except ValueError:
                raise gclient_utils.Error('Invalid Character')
            if options.auto_rebase or re.match(r'yes|y', action, re.I):
              self._AttemptRebase(upstream_branch, rebase_files, options,
                                  printed_path=printed_path, merge=False)
              printed_path = True
              break
            elif re.match(r'quit|q', action, re.I):
              raise gclient_utils.Error("Can't fast-forward, please merge or "
                                        "rebase manually.\n"
                                        "cd %s && git " % self.checkout_path
                                        + "rebase %s" % upstream_branch)
            elif re.match(r'skip|s', action, re.I):
              self.Print('Skipping %s' % self.relpath)
              return
            else:
              self.Print('Input not recognized')
        elif re.match("error: Your local changes to '.*' would be "
                      "overwritten by merge.  Aborting.\nPlease, commit your "
                      "changes or stash them before you can merge.\n",
                      e.stderr):
          if not printed_path:
            self.Print('_____ %s at %s' % (self.relpath, revision),
                       timestamp=False)
            printed_path = True
          raise gclient_utils.Error(e.stderr)
        else:
          # Some other problem happened with the merge
          logging.error("Error during fast-forward merge in %s!" % self.relpath)
          self.Print(e.stderr)
          raise
      else:
        # Fast-forward merge was successful
        if not re.match('Already up-to-date.', merge_output) or verbose:
          if not printed_path:
            self.Print('_____ %s at %s' % (self.relpath, revision),
                       timestamp=False)
            printed_path = True
          self.Print(merge_output.strip())
          if not verbose:
            # Make the output a little prettier. It's nice to have some
            # whitespace between projects when syncing.
            self.Print('')

      if file_list is not None:
        file_list.extend(
            [os.path.join(self.checkout_path, f) for f in rebase_files])

    # If the rebase generated a conflict, abort and ask user to fix
    if self._IsRebasing():
      raise gclient_utils.Error('\n____ %s at %s\n'
                                '\nConflict while rebasing this branch.\n'
                                'Fix the conflict and run gclient again.\n'
                                'See man git-rebase for details.\n'
                                % (self.relpath, revision))

    if verbose:
      self.Print('Checked out revision %s' % self.revinfo(options, (), None),
                 timestamp=False)

    # If --reset and --delete_unversioned_trees are specified, remove any
    # untracked directories.
    if options.reset and options.delete_unversioned_trees:
      # GIT.CaptureStatus() uses 'git diff' to compare to a specific SHA1 (the
      # merge-base by default), so it doesn't include untracked files. So we
      # use 'git ls-files --directory --others --exclude-standard' directly.
      paths = scm.GIT.Capture(
          ['ls-files', '--directory', '--others', '--exclude-standard'],
          self.checkout_path)
      for path in (p for p in paths.splitlines() if p.endswith('/')):
        full_path = os.path.join(self.checkout_path, path)
        if not os.path.islink(full_path):
          self.Print('_____ removing unversioned directory %s' % path)
          gclient_utils.rmtree(full_path)

    return self._Capture(['rev-parse', '--verify', 'HEAD'])
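
The closing sweep relies on `git ls-files`: --others lists untracked paths, --directory collapses a fully-untracked directory into a single 'dir/' entry, and --exclude-standard honors .gitignore and friends. The same sweep as a standalone sketch (shutil.rmtree standing in for gclient_utils.rmtree):

import os
import shutil
import subprocess

def remove_untracked_dirs(checkout_path):
  paths = subprocess.check_output(
      ['git', 'ls-files', '--directory', '--others', '--exclude-standard'],
      cwd=checkout_path)
  # Only entries ending in '/' are directories; skip symlinks so a link
  # into a tracked location is never followed and deleted.
  for path in (p for p in paths.splitlines() if p.endswith('/')):
    full_path = os.path.join(checkout_path, path)
    if not os.path.islink(full_path):
      print '_____ removing unversioned directory %s' % path
      shutil.rmtree(full_path)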
Example No. 46
  def update(self, options, args, file_list):
    """Runs svn to update or transparently checkout the working copy.

    All updated files will be appended to file_list.

    Raises:
      Error: if can't get URL for relative path.
    """
    # Only update if git or hg is not controlling the directory.
    git_path = os.path.join(self.checkout_path, '.git')
    if os.path.exists(git_path):
      print('________ found .git directory; skipping %s' % self.relpath)
      return

    hg_path = os.path.join(self.checkout_path, '.hg')
    if os.path.exists(hg_path):
      print('________ found .hg directory; skipping %s' % self.relpath)
      return

    if args:
      raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args))

    # revision is the revision to match. It is None if no revision is specified,
    # i.e. the 'deps ain't pinned'.
    url, revision = gclient_utils.SplitUrlRevision(self.url)
    # Keep the original unpinned url for reference in case the repo is switched.
    base_url = url
    managed = True
    if options.revision:
      # Override the revision number.
      revision = str(options.revision)
    if revision:
      if revision != 'unmanaged':
        forced_revision = True
        # Reconstruct the url.
        url = '%s@%s' % (url, revision)
        rev_str = ' at %s' % revision
      else:
        managed = False
        revision = None
    else:
      forced_revision = False
      rev_str = ''

    if not os.path.exists(self.checkout_path):
      gclient_utils.safe_makedirs(os.path.dirname(self.checkout_path))
      # We need to checkout.
      command = ['checkout', url, self.checkout_path]
      command = self._AddAdditionalUpdateFlags(command, options, revision)
      self._RunAndGetFileList(command, options, file_list, self._root_dir)
      return

    if not managed:
      print ('________ unmanaged solution; skipping %s' % self.relpath)
      return

    # Get the existing scm url and the revision number of the current checkout.
    try:
      from_info = scm.SVN.CaptureLocalInfo(
          [], os.path.join(self.checkout_path, '.'))
    except (gclient_utils.Error, subprocess2.CalledProcessError):
      raise gclient_utils.Error(
          ('Can\'t update/checkout %s if an unversioned directory is present. '
           'Delete the directory and try again.') % self.checkout_path)

    if 'URL' not in from_info:
      raise gclient_utils.Error(
          ('gclient is confused. Couldn\'t get the url for %s.\n'
           'Try using @unmanaged.\n%s') % (
            self.checkout_path, from_info))

    # Look for locked directories.
    dir_info = scm.SVN.CaptureStatus(
        None, os.path.join(self.checkout_path, '.'))
    if any(d[0][2] == 'L' for d in dir_info):
      try:
        self._Run(['cleanup', self.checkout_path], options)
      except subprocess2.CalledProcessError as e:
        # Get the status again, svn cleanup may have cleaned up at least
        # something.
        dir_info = scm.SVN.CaptureStatus(
            None, os.path.join(self.checkout_path, '.'))

        # Try to fix the failures by removing troublesome files.
        for d in dir_info:
          if d[0][2] == 'L':
            if d[0][0] == '!' and options.force:
              print 'Removing troublesome path %s' % d[1]
              gclient_utils.rmtree(d[1])
            else:
              print 'Not removing troublesome path %s automatically.' % d[1]
              if d[0][0] == '!':
                print 'You can pass --force to enable automatic removal.'
              raise e
Example No. 47
    def RunAndGetFileList(verbose, args, cwd, file_list, stdout=None):
        """Runs svn checkout, update, or status, output to stdout.

    The first item in args must be either "checkout", "update", or "status".

    svn's stdout is parsed to collect a list of files checked out or updated.
    These files are appended to file_list.  svn's stdout is also printed to
    sys.stdout as in Run.

    Args:
      verbose: If True, uses verbose output
      args: A sequence of command line parameters to be passed to svn.
      cwd: The directory where svn is to be run.

    Raises:
      Error: An error occurred while running the svn command.
    """
        stdout = stdout or sys.stdout
        if file_list is None:
            # Even if our caller doesn't care about file_list, we use it internally.
            file_list = []

        # svn update and svn checkout use the same pattern: the first three columns
        # are for file status, property status, and lock status.  This is followed
        # by two spaces, and then the path to the file.
        update_pattern = "^...  (.*)$"

        # The first three columns of svn status are the same as for svn update and
        # svn checkout.  The next three columns indicate addition-with-history,
        # switch, and remote lock status.  This is followed by one space, and then
        # the path to the file.
        status_pattern = "^...... (.*)$"

        # args[0] must be a supported command.  This will blow up if it's something
        # else, which is good.  Note that the patterns are only effective when
        # these commands are used in their ordinary forms, the patterns are invalid
        # for "svn status --show-updates", for example.
        pattern = {"checkout": update_pattern, "status": status_pattern, "update": update_pattern}[args[0]]
        compiled_pattern = re.compile(pattern)
        # Place an upper limit.
        backoff_time = 5
        retries = 0
        while True:
            retries += 1
            previous_list_len = len(file_list)
            failure = []

            def CaptureMatchingLines(line):
                match = compiled_pattern.search(line)
                if match:
                    file_list.append(match.group(1))
                if line.startswith("svn: "):
                    failure.append(line)

            try:
                gclient_utils.CheckCallAndFilterAndHeader(
                    ["svn"] + args, cwd=cwd, always=verbose, filter_fn=CaptureMatchingLines, stdout=stdout
                )
            except subprocess2.CalledProcessError:

                def IsKnownFailure():
                    for x in failure:
                        if (
                            x.startswith("svn: OPTIONS of")
                            or x.startswith("svn: PROPFIND of")
                            or x.startswith("svn: REPORT of")
                            or x.startswith("svn: Unknown hostname")
                            or x.startswith("svn: Server sent unexpected return value")
                            or x.startswith("svn: Can't connect to host")
                        ):
                            return True
                    return False

                # Subversion client is really misbehaving with Google Code.
                if args[0] == "checkout":
                    # Ensure at least one file was checked out, otherwise *delete* the
                    # directory.
                    if len(file_list) == previous_list_len:
                        if not IsKnownFailure():
                            # No known svn error was found, bail out.
                            raise
                        # No files were checked out, so make sure the directory is
                        # deleted in case it's messed up and try again.
                        # Warning: It's bad, it assumes args[2] is the directory
                        # argument.
                        if os.path.isdir(args[2]):
                            gclient_utils.rmtree(args[2])
                    else:
                        # Progress was made, convert to update since an aborted checkout
                        # is now an update.
                        args = ["update"] + args[1:]
                else:
                    # It was an update or export.
                    # We enforce that some progress has been made or a known failure.
                    if len(file_list) == previous_list_len and not IsKnownFailure():
                        # No known svn error was found and no progress, bail out.
                        raise
                if retries == 10:
                    raise
                print "Sleeping %.1f seconds and retrying...." % backoff_time
                time.sleep(backoff_time)
                backoff_time *= 1.3
                continue
            break
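
The retry loop above is a plain capped exponential backoff: at most 10 attempts, a 5-second initial sleep, and a 1.3x growth factor. The skeleton in isolation (a sketch; the real loop additionally downgrades a partially-successful checkout into an update, which is omitted here):

import time

def run_with_backoff(operation, max_retries=10, backoff_time=5.0):
  """Call `operation` until it succeeds or max_retries attempts are spent."""
  retries = 0
  while True:
    retries += 1
    try:
      return operation()
    except Exception:
      if retries == max_retries:
        raise
      print "Sleeping %.1f seconds and retrying...." % backoff_time
      time.sleep(backoff_time)
      backoff_time *= 1.3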
Example No. 48
  def _Clone(self, revision, url, options):
    """Clone a git repository from the given URL.

    Once we've cloned the repo, we checkout a working branch if the specified
    revision is a branch head. If it is a tag or a specific commit, then we
    leave HEAD detached as it makes future updates simpler -- in this case the
    user should first create a new branch or switch to an existing branch before
    making changes in the repo."""
    if not options.verbose:
      # git clone doesn't seem to insert a newline properly before printing
      # to stdout
      self.Print('')
    cfg = gclient_utils.DefaultIndexPackConfig(url)
    clone_cmd = cfg + ['clone', '--no-checkout', '--progress']
    if self.cache_dir:
      clone_cmd.append('--shared')
    if options.verbose:
      clone_cmd.append('--verbose')
    clone_cmd.append(url)
    # If the parent directory does not exist, Git clone on Windows will not
    # create it, so we need to do it manually.
    parent_dir = os.path.dirname(self.checkout_path)
    gclient_utils.safe_makedirs(parent_dir)

    template_dir = None
    if hasattr(options, 'no_history') and options.no_history:
      if gclient_utils.IsGitSha(revision):
        # In the case of a subproject, the pinned sha is not necessarily the
        # head of the remote branch (so we can't just use --depth=N). Instead,
        # we tell git to fetch all the remote objects from SHA..HEAD by means of
        # a template git dir which has a 'shallow' file pointing to the sha.
        template_dir = tempfile.mkdtemp(
            prefix='_gclient_gittmp_%s' % os.path.basename(self.checkout_path),
            dir=parent_dir)
        self._Run(['init', '--bare', template_dir], options, cwd=self._root_dir)
        with open(os.path.join(template_dir, 'shallow'), 'w') as template_file:
          template_file.write(revision)
        clone_cmd.append('--template=' + template_dir)
      else:
        # Otherwise, we're just interested in the HEAD. Just use --depth.
        clone_cmd.append('--depth=1')

    tmp_dir = tempfile.mkdtemp(
        prefix='_gclient_%s_' % os.path.basename(self.checkout_path),
        dir=parent_dir)
    try:
      clone_cmd.append(tmp_dir)
      self._Run(clone_cmd, options, cwd=self._root_dir, retry=True)
      gclient_utils.safe_makedirs(self.checkout_path)
      gclient_utils.safe_rename(os.path.join(tmp_dir, '.git'),
                                os.path.join(self.checkout_path, '.git'))
    except:
      traceback.print_exc(file=self.out_fh)
      raise
    finally:
      if os.listdir(tmp_dir):
        self.Print('_____ removing non-empty tmp dir %s' % tmp_dir)
      gclient_utils.rmtree(tmp_dir)
      if template_dir:
        gclient_utils.rmtree(template_dir)
    self._UpdateBranchHeads(options, fetch=True)
    revision = self._AutoFetchRef(options, revision)
    remote_ref = scm.GIT.RefToRemoteRef(revision, self.remote)
    self._Checkout(options, ''.join(remote_ref or revision), quiet=True)
    if self._GetCurrentBranch() is None:
      # Squelch git's very verbose detached HEAD warning and use our own
      self.Print(
        ('Checked out %s to a detached HEAD. Before making any commits\n'
         'in this repo, you should use \'git checkout <branch>\' to switch to\n'
         'an existing branch or use \'git checkout %s -b <branch>\' to\n'
         'create a new branch for your work.') % (revision, self.remote))
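
The --template trick above works because git copies the template directory's contents into the new .git, and a 'shallow' file there makes subsequent fetches treat the pinned sha as a history boundary, so only SHA..HEAD is transferred. Just that setup, condensed into a sketch (subprocess and tempfile in place of the class helpers):

import os
import subprocess
import tempfile

def make_shallow_template(pinned_sha, parent_dir):
  """Create a bare template repo whose 'shallow' file bounds later fetches."""
  template_dir = tempfile.mkdtemp(prefix='_gclient_gittmp_', dir=parent_dir)
  subprocess.check_call(['git', 'init', '--bare', template_dir])
  with open(os.path.join(template_dir, 'shallow'), 'w') as f:
    f.write(pinned_sha)
  # Use as: git clone --no-checkout --template=<template_dir> <url> <dest>
  return template_dir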
Example No. 49
    def RunAndGetFileList(verbose, args, cwd, file_list, stdout=None):
        """Runs svn checkout, update, or status, output to stdout.

    The first item in args must be either "checkout", "update", or "status".

    svn's stdout is parsed to collect a list of files checked out or updated.
    These files are appended to file_list.  svn's stdout is also printed to
    sys.stdout as in Run.

    Args:
      verbose: If True, uses verbose output
      args: A sequence of command line parameters to be passed to svn.
      cwd: The directory where svn is to be run.

    Raises:
      Error: An error occurred while running the svn command.
    """
        stdout = stdout or sys.stdout
        if file_list is None:
            # Even if our caller doesn't care about file_list, we use it internally.
            file_list = []

        # svn update and svn checkout use the same pattern: the first three columns
        # are for file status, property status, and lock status.  This is followed
        # by two spaces, and then the path to the file.
        update_pattern = '^...  (.*)$'

        # The first three columns of svn status are the same as for svn update and
        # svn checkout.  The next three columns indicate addition-with-history,
        # switch, and remote lock status.  This is followed by one space, and then
        # the path to the file.
        status_pattern = '^...... (.*)$'

        # args[0] must be a supported command.  This will blow up if it's something
        # else, which is good.  Note that the patterns are only effective when
        # these commands are used in their ordinary forms, the patterns are invalid
        # for "svn status --show-updates", for example.
        pattern = {
            'checkout': update_pattern,
            'status': status_pattern,
            'update': update_pattern,
        }[args[0]]
        compiled_pattern = re.compile(pattern)
        # Place an upper limit.
        backoff_time = 5
        retries = 0
        while True:
            retries += 1
            previous_list_len = len(file_list)
            failure = []

            def CaptureMatchingLines(line):
                match = compiled_pattern.search(line)
                if match:
                    file_list.append(match.group(1))
                if line.startswith('svn: '):
                    failure.append(line)

            try:
                gclient_utils.CheckCallAndFilterAndHeader(
                    ['svn'] + args,
                    cwd=cwd,
                    always=verbose,
                    filter_fn=CaptureMatchingLines,
                    stdout=stdout)
            except subprocess2.CalledProcessError:

                def IsKnownFailure():
                    for x in failure:
                        if (x.startswith('svn: OPTIONS of')
                                or x.startswith('svn: PROPFIND of')
                                or x.startswith('svn: REPORT of')
                                or x.startswith('svn: Unknown hostname')
                                or x.startswith(
                                    'svn: Server sent unexpected return value')
                                or
                                x.startswith('svn: Can\'t connect to host')):
                            return True
                    return False

                # Subversion client is really misbehaving with Google Code.
                if args[0] == 'checkout':
                    # Ensure at least one file was checked out, otherwise *delete* the
                    # directory.
                    if len(file_list) == previous_list_len:
                        if not IsKnownFailure():
                            # No known svn error was found, bail out.
                            raise
                        # No files were checked out, so make sure the directory is
                        # deleted in case it's messed up and try again.
                        # Warning: It's bad, it assumes args[2] is the directory
                        # argument.
                        if os.path.isdir(args[2]):
                            gclient_utils.rmtree(args[2])
                    else:
                        # Progress was made, convert to update since an aborted checkout
                        # is now an update.
                        args = ['update'] + args[1:]
                else:
                    # It was an update or export.
                    # We enforce that some progress has been made or a known failure.
                    if (len(file_list) == previous_list_len
                            and not IsKnownFailure()):
                        # No known svn error was found and no progress, bail out.
                        raise
                if retries == 10:
                    raise
                print "Sleeping %.1f seconds and retrying...." % backoff_time
                time.sleep(backoff_time)
                backoff_time *= 1.3
                continue
            break
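
The two patterns above differ only in how many status columns precede the path: checkout/update output has three columns and two spaces, status output has six columns and one space. A quick demonstration against synthetic lines shaped like that output:

import re

update_pattern = re.compile('^...  (.*)$')
status_pattern = re.compile('^...... (.*)$')

# Three status columns, two spaces, then the path (checkout/update shape).
print update_pattern.search('A    trunk/src/new_file.cc').group(1)
# Six status columns, one space, then the path (status shape).
print status_pattern.search('M      gclient_utils.py').group(1)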
Example No. 50
  def tear_down_git(self):
    if self.trial.SHOULD_LEAK:
      return False
    logging.debug('Removing %s' % self.git_base)
    gclient_utils.rmtree(self.git_base)
    return True