コード例 #1
0
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False, ignore_lock=False, lock_timeout=0):
    """Fill or refresh the git cache mirror for this repository.

    Args:
      depth: history depth to fetch; a truthy value implies a shallow fetch.
      shallow: if true and no depth is given, fetch with depth 10000.
      bootstrap: allow seeding a new cache from a bootstrap source.
      verbose: pass verbose flags through to git.
      ignore_lock: skip acquiring the cache lockfile.
      lock_timeout: seconds to wait when acquiring the lockfile.
    """
    assert self.GetCachePath()
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    lockfile = Lockfile(self.mirror_path, lock_timeout)
    if not ignore_lock:
      lockfile.lock()

    tempdir = None
    # Pre-bind rundir: if _ensure_bootstrapped itself raises ClobberNeeded,
    # the handler below would otherwise hit a NameError on rundir.
    rundir = self.mirror_path
    try:
      tempdir = self._ensure_bootstrapped(depth, bootstrap)
      rundir = tempdir or self.mirror_path
      self._fetch(rundir, verbose, depth)
    except ClobberNeeded:
      # This is a major failure, we need to clean and force a bootstrap.
      gclient_utils.rmtree(rundir)
      self.print(GIT_CACHE_CORRUPT_MESSAGE)
      tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
      assert tempdir
      self._fetch(tempdir, verbose, depth)
    finally:
      if tempdir:
        # Swap the freshly populated tempdir into place as the mirror.
        if os.path.exists(self.mirror_path):
          gclient_utils.rmtree(self.mirror_path)
        self.Rename(tempdir, self.mirror_path)
      if not ignore_lock:
        lockfile.unlock()
コード例 #2
0
ファイル: gclient_scm.py プロジェクト: DerinoGamer/dotfiles
 def updatesingle(self, options, args, file_list):
   """Check out or export a single file from the SVN repository.

   The last element of args is the filename; paths touched by the update
   are appended to file_list.  Falls back to 'svn export' when the
   installed SVN is too old to support sparse (--depth) checkouts.
   """
   filename = args.pop()
   if scm.SVN.AssertVersion("1.5")[0]:
     if not os.path.exists(os.path.join(self.checkout_path, '.svn')):
       # Create an empty checkout and then update the one file we want.  Future
       # operations will only apply to the one file we checked out.
       command = ["checkout", "--depth", "empty", self.url, self.checkout_path]
       self._Run(command, options, cwd=self._root_dir)
       # Remove any stale copy so the update below writes a fresh one.
       if os.path.exists(os.path.join(self.checkout_path, filename)):
         os.remove(os.path.join(self.checkout_path, filename))
       command = ["update", filename]
       self._RunAndGetFileList(command, options, file_list)
     # After the initial checkout, we can use update as if it were any other
     # dep.
     self.update(options, args, file_list)
   else:
     # If the installed version of SVN doesn't support --depth, fallback to
     # just exporting the file.  This has the downside that revision
     # information is not stored next to the file, so we will have to
     # re-export the file every time we sync.
     if not os.path.exists(self.checkout_path):
       gclient_utils.safe_makedirs(self.checkout_path)
     command = ["export", os.path.join(self.url, filename),
                os.path.join(self.checkout_path, filename)]
     command = self._AddAdditionalUpdateFlags(command, options,
         options.revision)
     self._Run(command, options, cwd=self._root_dir)
コード例 #3
0
    def populate(self,
                 depth=None,
                 no_fetch_tags=False,
                 shallow=False,
                 bootstrap=False,
                 verbose=False,
                 lock_timeout=0,
                 reset_fetch_config=False):
        """Bring the mirror up to date, re-bootstrapping on corruption.

        Takes the cache lockfile for the duration of the operation.  If the
        first bootstrap/fetch pass reports a corrupt cache (ClobberNeeded),
        the mirror directory is wiped and a forced bootstrap is attempted.
        """
        assert self.GetCachePath()
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        def attempt(**kwargs):
            # One bootstrap-then-fetch pass over the mirror directory.
            self._ensure_bootstrapped(depth, bootstrap, reset_fetch_config,
                                      **kwargs)
            self._fetch(self.mirror_path, verbose, depth, no_fetch_tags,
                        reset_fetch_config)

        with lockfile.lock(self.mirror_path, lock_timeout):
            try:
                attempt()
            except ClobberNeeded:
                # This is a major failure, we need to clean and force a bootstrap.
                gclient_utils.rmtree(self.mirror_path)
                self.print(GIT_CACHE_CORRUPT_MESSAGE)
                attempt(force=True)
コード例 #4
0
ファイル: git_cache.py プロジェクト: sharpglasses/depot_tools
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False, ignore_lock=False, lock_timeout=0,
               reset_fetch_config=False):
    """Fill or refresh the git cache mirror for this repository.

    Args:
      depth: history depth to fetch; shallow=True with no depth uses 10000.
      bootstrap: allow seeding a new cache from a bootstrap source.
      verbose: pass verbose flags through to git.
      ignore_lock: skip acquiring the cache lockfile.
      lock_timeout: seconds to wait when acquiring the lockfile.
      reset_fetch_config: reset the mirror's fetch config before fetching.
    """
    assert self.GetCachePath()
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    lockfile = Lockfile(self.mirror_path, lock_timeout)
    if not ignore_lock:
      lockfile.lock()

    tempdir = None
    # Pre-bind rundir: if _ensure_bootstrapped itself raises ClobberNeeded,
    # the handler below would otherwise hit a NameError on rundir.
    rundir = self.mirror_path
    try:
      tempdir = self._ensure_bootstrapped(depth, bootstrap)
      rundir = tempdir or self.mirror_path
      self._fetch(rundir, verbose, depth, reset_fetch_config)
    except ClobberNeeded:
      # This is a major failure, we need to clean and force a bootstrap.
      gclient_utils.rmtree(rundir)
      self.print(GIT_CACHE_CORRUPT_MESSAGE)
      tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
      assert tempdir
      self._fetch(tempdir, verbose, depth, reset_fetch_config)
    finally:
      if tempdir:
        # Swap the freshly populated tempdir into place as the mirror.
        if os.path.exists(self.mirror_path):
          gclient_utils.rmtree(self.mirror_path)
        self.Rename(tempdir, self.mirror_path)
      if not ignore_lock:
        lockfile.unlock()
コード例 #5
0
ファイル: git_cache.py プロジェクト: mYoda/CustomBrs
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False, ignore_lock=False):
    """Fill or refresh the git cache mirror for this repository.

    Args:
      depth: history depth to fetch; shallow=True with no depth uses 10000.
      bootstrap: allow seeding a new cache from a bootstrap source.
      verbose: pass verbose flags through to git.
      ignore_lock: skip acquiring the cache lockfile.
    """
    assert self.GetCachePath()
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    lockfile = Lockfile(self.mirror_path)
    if not ignore_lock:
      lockfile.lock()

    tempdir = None
    # Pre-bind rundir: if _ensure_bootstrapped itself raises, the handler
    # below would otherwise hit a NameError on rundir.
    rundir = self.mirror_path
    try:
      tempdir = self._ensure_bootstrapped(depth, bootstrap)
      rundir = tempdir or self.mirror_path
      self._fetch(rundir, verbose, depth)
    except RefsHeadsFailedToFetch:
      # This is a major failure, we need to clean and force a bootstrap.
      gclient_utils.rmtree(rundir)
      self.print(GIT_CACHE_CORRUPT_MESSAGE)
      tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
      assert tempdir
      self._fetch(tempdir, verbose, depth)
    finally:
      if tempdir:
        # NOTE(review): os.rename fails if mirror_path still exists here;
        # this assumes the bootstrap path already removed it — confirm.
        os.rename(tempdir, self.mirror_path)
      if not ignore_lock:
        lockfile.unlock()
コード例 #6
0
ファイル: git_cache.py プロジェクト: duanwujie/depot_tools
    def populate(self, depth=None, shallow=False, bootstrap=False, verbose=False, ignore_lock=False):
        """Fill or refresh the git cache mirror for this repository.

        Args:
          depth: history depth to fetch; shallow=True with no depth uses 10000.
          bootstrap: allow seeding a new cache from a bootstrap source.
          verbose: pass verbose flags through to git.
          ignore_lock: skip acquiring the cache lockfile.
        """
        assert self.GetCachePath()
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        lockfile = Lockfile(self.mirror_path)
        if not ignore_lock:
            lockfile.lock()

        tempdir = None
        # Pre-bind rundir: if _ensure_bootstrapped itself raises, the handler
        # below would otherwise hit a NameError on rundir.
        rundir = self.mirror_path
        try:
            tempdir = self._ensure_bootstrapped(depth, bootstrap)
            rundir = tempdir or self.mirror_path
            self._fetch(rundir, verbose, depth)
        except RefsHeadsFailedToFetch:
            # This is a major failure, we need to clean and force a bootstrap.
            gclient_utils.rmtree(rundir)
            self.print(GIT_CACHE_CORRUPT_MESSAGE)
            tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
            assert tempdir
            self._fetch(tempdir, verbose, depth)
        finally:
            if tempdir:
                try:
                    if os.path.exists(self.mirror_path):
                        gclient_utils.rmtree(self.mirror_path)
                    os.rename(tempdir, self.mirror_path)
                except OSError as e:
                    # This is somehow racy on Windows.
                    # Catching OSError because WindowsError isn't portable and
                    # pylint complains.
                    self.print("Error moving %s to %s: %s" % (tempdir, self.mirror_path, str(e)))
            if not ignore_lock:
                lockfile.unlock()
コード例 #7
0
ファイル: gclient_scm.py プロジェクト: DerinoGamer/dotfiles
  def _Clone(self, revision, url, options):
    """Clone a git repository from the given URL.

    Once we've cloned the repo, we checkout a working branch if the specified
    revision is a branch head. If it is a tag or a specific commit, then we
    leave HEAD detached as it makes future updates simpler -- in this case the
    user should first create a new branch or switch to an existing branch before
    making changes in the repo."""
    if not options.verbose:
      # git clone doesn't seem to insert a newline properly before printing
      # to stdout
      print('')

    clone_cmd = ['clone', '--progress']
    if revision.startswith('refs/heads/'):
      clone_cmd.extend(['-b', revision.replace('refs/heads/', '')])
      detach_head = False
    else:
      detach_head = True
    # NOTE(review): detach_head is assigned but never read within this
    # method — presumably consumed by code elsewhere; confirm.
    if options.verbose:
      clone_cmd.append('--verbose')
    clone_cmd.extend([url, self.checkout_path])

    # If the parent directory does not exist, Git clone on Windows will not
    # create it, so we need to do it manually.
    parent_dir = os.path.dirname(self.checkout_path)
    if not os.path.exists(parent_dir):
      gclient_utils.safe_makedirs(parent_dir)

    percent_re = re.compile('.* ([0-9]{1,2})% .*')
    def _GitFilter(line):
      # git uses an escape sequence to clear the line; elide it.
      esc = line.find(unichr(033))
      if esc > -1:
        line = line[:esc]
      match = percent_re.match(line)
      # Throttle progress output: only echo lines at multiples of 10%.
      if not match or not int(match.group(1)) % 10:
        print '%s' % line

    # Retry the clone up to three times; transient network failures during
    # transfer surface as git exit code 128.
    for _ in range(3):
      try:
        self._Run(clone_cmd, options, cwd=self._root_dir, filter_fn=_GitFilter,
                  print_stdout=False)
        break
      except subprocess2.CalledProcessError, e:
        # Too bad we don't have access to the actual output yet.
        # We should check for "transfer closed with NNN bytes remaining to
        # read". In the meantime, just make sure .git exists.
        if (e.returncode == 128 and
            os.path.exists(os.path.join(self.checkout_path, '.git'))):
          print(str(e))
          print('Retrying...')
          continue
        raise e
    # NOTE(review): if all three attempts hit the retryable case, the loop
    # falls through without raising — confirm this is intended.
コード例 #8
0
ファイル: git_cache.py プロジェクト: quartexNOR/webkit.js
    def populate(self,
                 depth=None,
                 shallow=False,
                 bootstrap=False,
                 verbose=False):
        """Create or update the mirror, fetching every configured refspec.

        A brand-new or corrupt cache (no 'config' file) is rebuilt in a
        temporary directory and renamed into place on success.
        """
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        verbosity_args = ['-v', '--progress'] if verbose else []
        depth_args = ['--depth', str(depth)] if depth else []

        with Lockfile(self.mirror_path):
            # Setup from scratch if the repo is new or is in a bad state.
            tempdir = None
            config_file = os.path.join(self.mirror_path, 'config')
            if not os.path.exists(config_file):
                gclient_utils.rmtree(self.mirror_path)
                tempdir = tempfile.mkdtemp(suffix=self.basedir,
                                           dir=self.GetCachePath())
                # Bootstrapping only applies to full (non-shallow) caches.
                bootstrapped = (not depth and bootstrap
                                and self.bootstrap_repo(tempdir))
                if not bootstrapped:
                    self.RunGit(['init', '--bare'], cwd=tempdir)
            else:
                if depth and os.path.exists(
                        os.path.join(self.mirror_path, 'shallow')):
                    logging.warn(
                        'Shallow fetch requested, but repo cache already exists.'
                    )
                # Never change the depth of an already-existing cache.
                depth_args = []

            workdir = tempdir or self.mirror_path
            self.config(workdir)
            fetch_cmd = ['fetch'] + verbosity_args + depth_args + ['origin']
            fetch_specs = subprocess.check_output(
                [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
                cwd=workdir).strip().splitlines()
            for spec in fetch_specs:
                try:
                    self.RunGit(fetch_cmd + [spec], cwd=workdir, retry=True)
                except subprocess.CalledProcessError:
                    # Best effort: one failed refspec must not abort the rest.
                    logging.warn('Fetch of %s failed' % spec)
            if tempdir:
                os.rename(tempdir, self.mirror_path)
コード例 #9
0
ファイル: git_cache.py プロジェクト: Happy-Ferret/webkit.js
  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False):
    """Create or update the mirror repo, fetching every configured refspec.

    A brand-new or corrupt cache (no 'config' file) is rebuilt in a
    temporary directory and renamed into place on success.
    """
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    # Optional git flag lists assembled up front.
    v = []
    if verbose:
      v = ['-v', '--progress']

    d = []
    if depth:
      d = ['--depth', str(depth)]


    with Lockfile(self.mirror_path):
      # Setup from scratch if the repo is new or is in a bad state.
      tempdir = None
      if not os.path.exists(os.path.join(self.mirror_path, 'config')):
        gclient_utils.rmtree(self.mirror_path)
        tempdir = tempfile.mkdtemp(
            suffix=self.basedir, dir=self.GetCachePath())
        # Bootstrapping only applies to full (non-shallow) caches.
        bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)
        if not bootstrapped:
          self.RunGit(['init', '--bare'], cwd=tempdir)
      else:
        if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
          logging.warn(
              'Shallow fetch requested, but repo cache already exists.')
        # Never change the depth of an already-existing cache.
        d = []

      rundir = tempdir or self.mirror_path
      self.config(rundir)
      fetch_cmd = ['fetch'] + v + d + ['origin']
      fetch_specs = subprocess.check_output(
          [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
          cwd=rundir).strip().splitlines()
      for spec in fetch_specs:
        try:
          self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True)
        except subprocess.CalledProcessError:
          # Best effort: one failed refspec must not abort the rest.
          logging.warn('Fetch of %s failed' % spec)
      if tempdir:
        # Publish the freshly-built cache under the real mirror path.
        os.rename(tempdir, self.mirror_path)
コード例 #10
0
    def populate(self,
                 depth=None,
                 shallow=False,
                 bootstrap=False,
                 verbose=False,
                 ignore_lock=False,
                 lock_timeout=0):
        """Fill or refresh the git cache mirror for this repository.

        Args:
          depth: history depth to fetch; shallow=True with no depth uses 10000.
          bootstrap: allow seeding a new cache from a bootstrap source.
          verbose: pass verbose flags through to git.
          ignore_lock: skip acquiring the cache lockfile.
          lock_timeout: seconds to wait when acquiring the lockfile.
        """
        assert self.GetCachePath()
        if shallow and not depth:
            depth = 10000
        gclient_utils.safe_makedirs(self.GetCachePath())

        lockfile = Lockfile(self.mirror_path, lock_timeout)
        if not ignore_lock:
            lockfile.lock()

        tempdir = None
        # Pre-bind rundir: if _ensure_bootstrapped itself raises, the handler
        # below would otherwise hit a NameError on rundir.
        rundir = self.mirror_path
        try:
            tempdir = self._ensure_bootstrapped(depth, bootstrap)
            rundir = tempdir or self.mirror_path
            self._fetch(rundir, verbose, depth)
        except RefsHeadsFailedToFetch:
            # This is a major failure, we need to clean and force a bootstrap.
            gclient_utils.rmtree(rundir)
            self.print(GIT_CACHE_CORRUPT_MESSAGE)
            tempdir = self._ensure_bootstrapped(depth, bootstrap, force=True)
            assert tempdir
            self._fetch(tempdir, verbose, depth)
        finally:
            if tempdir:
                try:
                    if os.path.exists(self.mirror_path):
                        gclient_utils.rmtree(self.mirror_path)
                    os.rename(tempdir, self.mirror_path)
                except OSError as e:
                    # This is somehow racy on Windows.
                    # Catching OSError because WindowsError isn't portable and
                    # pylint complains.
                    self.print('Error moving %s to %s: %s' %
                               (tempdir, self.mirror_path, str(e)))
            if not ignore_lock:
                lockfile.unlock()
コード例 #11
0
ファイル: gclient_scm.py プロジェクト: DerinoGamer/dotfiles
  def update(self, options, args, file_list):
    """Runs svn to update or transparently checkout the working copy.

    All updated files will be appended to file_list.

    Raises:
      Error: if can't get URL for relative path.
    """
    # Only update if git or hg is not controlling the directory.
    git_path = os.path.join(self.checkout_path, '.git')
    if os.path.exists(git_path):
      print('________ found .git directory; skipping %s' % self.relpath)
      return

    hg_path = os.path.join(self.checkout_path, '.hg')
    if os.path.exists(hg_path):
      print('________ found .hg directory; skipping %s' % self.relpath)
      return

    if args:
      raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args))

    # revision is the revision to match. It is None if no revision is specified,
    # i.e. the 'deps ain't pinned'.
    url, revision = gclient_utils.SplitUrlRevision(self.url)
    # Keep the original unpinned url for reference in case the repo is switched.
    base_url = url
    managed = True
    if options.revision:
      # Override the revision number.
      revision = str(options.revision)
    if revision:
      if revision != 'unmanaged':
        forced_revision = True
        # Reconstruct the url.
        url = '%s@%s' % (url, revision)
        rev_str = ' at %s' % revision
      else:
        # '@unmanaged' means gclient must leave this checkout alone.
        managed = False
        revision = None
    else:
      forced_revision = False
      rev_str = ''
    # NOTE(review): base_url, forced_revision and rev_str are unused in the
    # span visible here — presumably used later in the full method; confirm.

    if not os.path.exists(self.checkout_path):
      gclient_utils.safe_makedirs(os.path.dirname(self.checkout_path))
      # We need to checkout.
      command = ['checkout', url, self.checkout_path]
      command = self._AddAdditionalUpdateFlags(command, options, revision)
      self._RunAndGetFileList(command, options, file_list, self._root_dir)
      return

    if not managed:
      print ('________ unmanaged solution; skipping %s' % self.relpath)
      return

    # Get the existing scm url and the revision number of the current checkout.
    try:
      from_info = scm.SVN.CaptureLocalInfo(
          [], os.path.join(self.checkout_path, '.'))
    except (gclient_utils.Error, subprocess2.CalledProcessError):
      raise gclient_utils.Error(
          ('Can\'t update/checkout %s if an unversioned directory is present. '
           'Delete the directory and try again.') % self.checkout_path)

    if 'URL' not in from_info:
      raise gclient_utils.Error(
          ('gclient is confused. Couldn\'t get the url for %s.\n'
           'Try using @unmanaged.\n%s') % (
            self.checkout_path, from_info))

    # Look for locked directories.
    dir_info = scm.SVN.CaptureStatus(
        None, os.path.join(self.checkout_path, '.'))
    # Status column 3 == 'L' marks an svn lock; try 'svn cleanup' first.
    if any(d[0][2] == 'L' for d in dir_info):
      try:
        self._Run(['cleanup', self.checkout_path], options)
      except subprocess2.CalledProcessError, e:
        # Get the status again, svn cleanup may have cleaned up at least
        # something.
        dir_info = scm.SVN.CaptureStatus(
            None, os.path.join(self.checkout_path, '.'))

        # Try to fix the failures by removing troublesome files.
        for d in dir_info:
          if d[0][2] == 'L':
            if d[0][0] == '!' and options.force:
              print 'Removing troublesome path %s' % d[1]
              gclient_utils.rmtree(d[1])
            else:
              print 'Not removing troublesome path %s automatically.' % d[1]
              if d[0][0] == '!':
                print 'You can pass --force to enable automatic removal.'
              raise e
コード例 #12
0
ファイル: gclient_scm.py プロジェクト: DerinoGamer/dotfiles
  def update(self, options, args, file_list):
    """Runs git to update or transparently checkout the working copy.

    All updated files will be appended to file_list.

    Raises:
      Error: if can't get URL for relative path.
    """
    if args:
      raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args))

    self._CheckMinVersion("1.6.6")

    default_rev = "refs/heads/master"
    url, deps_revision = gclient_utils.SplitUrlRevision(self.url)
    rev_str = ""
    revision = deps_revision
    managed = True
    if options.revision:
      # Override the revision number.
      revision = str(options.revision)
    if revision == 'unmanaged':
      # '@unmanaged' means gclient must leave this checkout alone.
      revision = None
      managed = False
    if not revision:
      revision = default_rev

    if gclient_utils.IsDateRevision(revision):
      # Date-revisions only work on git-repositories if the reflog hasn't
      # expired yet. Use rev-list to get the corresponding revision.
      #  git rev-list -n 1 --before='time-stamp' branchname
      if options.transitive:
        print('Warning: --transitive only works for SVN repositories.')
        revision = default_rev

    rev_str = ' at %s' % revision
    files = []

    printed_path = False
    verbose = []
    if options.verbose:
      print('\n_____ %s%s' % (self.relpath, rev_str))
      verbose = ['--verbose']
      printed_path = True
    # NOTE(review): printed_path, rev_type, current_type and remote_output
    # are unused in the span visible here — presumably used later in the
    # full method; confirm.

    if revision.startswith('refs/heads/'):
      rev_type = "branch"
    elif revision.startswith('origin/'):
      # For compatability with old naming, translate 'origin' to 'refs/heads'
      revision = revision.replace('origin/', 'refs/heads/')
      rev_type = "branch"
    else:
      # hash is also a tag, only make a distinction at checkout
      rev_type = "hash"

    if not os.path.exists(self.checkout_path):
      # First sync for this dep: clone from scratch and report all files.
      gclient_utils.safe_makedirs(os.path.dirname(self.checkout_path))
      self._Clone(revision, url, options)
      files = self._Capture(['ls-files']).splitlines()
      file_list.extend([os.path.join(self.checkout_path, f) for f in files])
      if not verbose:
        # Make the output a little prettier. It's nice to have some whitespace
        # between projects when cloning.
        print('')
      return

    if not managed:
      print ('________ unmanaged solution; skipping %s' % self.relpath)
      return

    if not os.path.exists(os.path.join(self.checkout_path, '.git')):
      raise gclient_utils.Error('\n____ %s%s\n'
                                '\tPath is not a git repo. No .git dir.\n'
                                '\tTo resolve:\n'
                                '\t\trm -rf %s\n'
                                '\tAnd run gclient sync again\n'
                                % (self.relpath, rev_str, self.relpath))

    # See if the url has changed (the unittests use git://foo for the url, let
    # that through).
    current_url = self._Capture(['config', 'remote.origin.url'])
    # TODO(maruel): Delete url != 'git://foo' since it's just to make the
    # unit test pass. (and update the comment above)
    if current_url != url and url != 'git://foo':
      print('_____ switching %s to a new upstream' % self.relpath)
      # Make sure it's clean
      self._CheckClean(rev_str)
      # Switch over to the new upstream
      self._Run(['remote', 'set-url', 'origin', url], options)
      quiet = []
      if not options.verbose:
        quiet = ['--quiet']
      self._Run(['fetch', 'origin', '--prune'] + quiet, options)
      self._Run(['reset', '--hard', 'origin/master'] + quiet, options)
      files = self._Capture(['ls-files']).splitlines()
      file_list.extend([os.path.join(self.checkout_path, f) for f in files])
      return

    cur_branch = self._GetCurrentBranch()

    # Cases:
    # 0) HEAD is detached. Probably from our initial clone.
    #   - make sure HEAD is contained by a named ref, then update.
    # Cases 1-4. HEAD is a branch.
    # 1) current branch is not tracking a remote branch (could be git-svn)
    #   - try to rebase onto the new hash or branch
    # 2) current branch is tracking a remote branch with local committed
    #    changes, but the DEPS file switched to point to a hash
    #   - rebase those changes on top of the hash
    # 3) current branch is tracking a remote branch w/or w/out changes,
    #    no switch
    #   - see if we can FF, if not, prompt the user for rebase, merge, or stop
    # 4) current branch is tracking a remote branch, switches to a different
    #    remote branch
    #   - exit

    # GetUpstreamBranch returns something like 'refs/remotes/origin/master' for
    # a tracking branch
    # or 'master' if not a tracking branch (it's based on a specific rev/hash)
    # or it returns None if it couldn't find an upstream
    if cur_branch is None:
      upstream_branch = None
      current_type = "detached"
      logging.debug("Detached HEAD")
    else:
      upstream_branch = scm.GIT.GetUpstreamBranch(self.checkout_path)
      if not upstream_branch or not upstream_branch.startswith('refs/remotes'):
        current_type = "hash"
        logging.debug("Current branch is not tracking an upstream (remote)"
                      " branch.")
      elif upstream_branch.startswith('refs/remotes'):
        current_type = "branch"
      else:
        raise gclient_utils.Error('Invalid Upstream: %s' % upstream_branch)

    # Update the remotes first so we have all the refs.
    # Retry up to 10 times with exponential backoff on HTTP 502s.
    backoff_time = 5
    for _ in range(10):
      try:
        remote_output = scm.GIT.Capture(
            ['remote'] + verbose + ['update'],
            cwd=self.checkout_path)
        break
      except subprocess2.CalledProcessError, e:
        # Hackish but at that point, git is known to work so just checking for
        # 502 in stderr should be fine.
        if '502' in e.stderr:
          print(str(e))
          print('Sleeping %.1f seconds and retrying...' % backoff_time)
          time.sleep(backoff_time)
          backoff_time *= 1.3
          continue
        raise
コード例 #13
0
ファイル: gclient_scm.py プロジェクト: mlkt/depot_tools
  def _Clone(self, revision, url, options):
    """Clone a git repository from the given URL.

    Once we've cloned the repo, we checkout a working branch if the specified
    revision is a branch head. If it is a tag or a specific commit, then we
    leave HEAD detached as it makes future updates simpler -- in this case the
    user should first create a new branch or switch to an existing branch before
    making changes in the repo."""
    if not options.verbose:
      # git clone doesn't seem to insert a newline properly before printing
      # to stdout
      self.Print('')
    cfg = gclient_utils.DefaultIndexPackConfig(url)
    # --no-checkout: the working tree is populated later via _Checkout.
    clone_cmd = cfg + ['clone', '--no-checkout', '--progress']
    if self.cache_dir:
      # Share objects with the local git cache instead of re-downloading.
      clone_cmd.append('--shared')
    if options.verbose:
      clone_cmd.append('--verbose')
    clone_cmd.append(url)
    # If the parent directory does not exist, Git clone on Windows will not
    # create it, so we need to do it manually.
    parent_dir = os.path.dirname(self.checkout_path)
    gclient_utils.safe_makedirs(parent_dir)

    template_dir = None
    if hasattr(options, 'no_history') and options.no_history:
      if gclient_utils.IsGitSha(revision):
        # In the case of a subproject, the pinned sha is not necessarily the
        # head of the remote branch (so we can't just use --depth=N). Instead,
        # we tell git to fetch all the remote objects from SHA..HEAD by means of
        # a template git dir which has a 'shallow' file pointing to the sha.
        template_dir = tempfile.mkdtemp(
            prefix='_gclient_gittmp_%s' % os.path.basename(self.checkout_path),
            dir=parent_dir)
        self._Run(['init', '--bare', template_dir], options, cwd=self._root_dir)
        with open(os.path.join(template_dir, 'shallow'), 'w') as template_file:
          template_file.write(revision)
        clone_cmd.append('--template=' + template_dir)
      else:
        # Otherwise, we're just interested in the HEAD. Just use --depth.
        clone_cmd.append('--depth=1')

    # Clone into a temp dir, then move only the .git directory into place,
    # so a failed clone never leaves a half-initialized checkout behind.
    tmp_dir = tempfile.mkdtemp(
        prefix='_gclient_%s_' % os.path.basename(self.checkout_path),
        dir=parent_dir)
    try:
      clone_cmd.append(tmp_dir)
      self._Run(clone_cmd, options, cwd=self._root_dir, retry=True)
      gclient_utils.safe_makedirs(self.checkout_path)
      gclient_utils.safe_rename(os.path.join(tmp_dir, '.git'),
                                os.path.join(self.checkout_path, '.git'))
    except:
      traceback.print_exc(file=self.out_fh)
      raise
    finally:
      if os.listdir(tmp_dir):
        self.Print('_____ removing non-empty tmp dir %s' % tmp_dir)
      gclient_utils.rmtree(tmp_dir)
      if template_dir:
        gclient_utils.rmtree(template_dir)
    self._UpdateBranchHeads(options, fetch=True)
    revision = self._AutoFetchRef(options, revision)
    remote_ref = scm.GIT.RefToRemoteRef(revision, self.remote)
    self._Checkout(options, ''.join(remote_ref or revision), quiet=True)
    if self._GetCurrentBranch() is None:
      # Squelch git's very verbose detached HEAD warning and use our own
      self.Print(
        ('Checked out %s to a detached HEAD. Before making any commits\n'
         'in this repo, you should use \'git checkout <branch>\' to switch to\n'
         'an existing branch or use \'git checkout %s -b <branch>\' to\n'
         'create a new branch for your work.') % (revision, self.remote))
コード例 #14
0
ファイル: gclient_scm.py プロジェクト: mlkt/depot_tools
  def update(self, options, args, file_list):
    """Runs git to update or transparently checkout the working copy.

    All updated files will be appended to file_list.

    Raises:
      Error: if can't get URL for relative path.
    """
    if args:
      raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args))

    self._CheckMinVersion("1.6.6")

    # If a dependency is not pinned, track the default remote branch.
    default_rev = 'refs/remotes/%s/master' % self.remote
    url, deps_revision = gclient_utils.SplitUrlRevision(self.url)
    revision = deps_revision
    managed = True
    if options.revision:
      # Override the revision number.
      revision = str(options.revision)
    if revision == 'unmanaged':
      # Check again for a revision in case an initial ref was specified
      # in the url, for example bla.git@refs/heads/custombranch
      revision = deps_revision
      managed = False
    if not revision:
      revision = default_rev

    if managed:
      self._DisableHooks()

    printed_path = False
    verbose = []
    if options.verbose:
      self.Print('_____ %s at %s' % (self.relpath, revision), timestamp=False)
      verbose = ['--verbose']
      printed_path = True

    remote_ref = scm.GIT.RefToRemoteRef(revision, self.remote)
    if remote_ref:
      # Rewrite remote refs to their local equivalents.
      revision = ''.join(remote_ref)
      rev_type = "branch"
    elif revision.startswith('refs/'):
      # Local branch? We probably don't want to support, since DEPS should
      # always specify branches as they are in the upstream repo.
      rev_type = "branch"
    else:
      # hash is also a tag, only make a distinction at checkout
      rev_type = "hash"

    mirror = self._GetMirror(url, options)
    if mirror:
      url = mirror.mirror_path

    # If we are going to introduce a new project, there is a possibility that
    # we are syncing back to a state where the project was originally a
    # sub-project rolled by DEPS (realistic case: crossing the Blink merge point
    # syncing backwards, when Blink was a DEPS entry and not part of src.git).
    # In such case, we might have a backup of the former .git folder, which can
    # be used to avoid re-fetching the entire repo again (useful for bisects).
    backup_dir = self.GetGitBackupDirPath()
    target_dir = os.path.join(self.checkout_path, '.git')
    if os.path.exists(backup_dir) and not os.path.exists(target_dir):
      gclient_utils.safe_makedirs(self.checkout_path)
      os.rename(backup_dir, target_dir)
      # Reset to a clean state
      self._Scrub('HEAD', options)

    if (not os.path.exists(self.checkout_path) or
        (os.path.isdir(self.checkout_path) and
         not os.path.exists(os.path.join(self.checkout_path, '.git')))):
      if mirror:
        self._UpdateMirrorIfNotContains(mirror, options, rev_type, revision)
      try:
        self._Clone(revision, url, options)
      except subprocess2.CalledProcessError:
        self._DeleteOrMove(options.force)
        self._Clone(revision, url, options)
      if file_list is not None:
        files = self._Capture(['ls-files']).splitlines()
        file_list.extend([os.path.join(self.checkout_path, f) for f in files])
      if not verbose:
        # Make the output a little prettier. It's nice to have some whitespace
        # between projects when cloning.
        self.Print('')
      return self._Capture(['rev-parse', '--verify', 'HEAD'])

    if not managed:
      self._UpdateBranchHeads(options, fetch=False)
      self.Print('________ unmanaged solution; skipping %s' % self.relpath)
      return self._Capture(['rev-parse', '--verify', 'HEAD'])

    self._maybe_break_locks(options)

    if mirror:
      self._UpdateMirrorIfNotContains(mirror, options, rev_type, revision)

    # See if the url has changed (the unittests use git://foo for the url, let
    # that through).
    current_url = self._Capture(['config', 'remote.%s.url' % self.remote])
    return_early = False
    # TODO(maruel): Delete url != 'git://foo' since it's just to make the
    # unit test pass. (and update the comment above)
    # Skip url auto-correction if remote.origin.gclient-auto-fix-url is set.
    # This allows devs to use experimental repos which have a different url
    # but whose branch(s) are the same as official repos.
    if (current_url.rstrip('/') != url.rstrip('/') and
        url != 'git://foo' and
        subprocess2.capture(
            ['git', 'config', 'remote.%s.gclient-auto-fix-url' % self.remote],
            cwd=self.checkout_path).strip() != 'False'):
      self.Print('_____ switching %s to a new upstream' % self.relpath)
      if not (options.force or options.reset):
        # Make sure it's clean
        self._CheckClean(revision)
      # Switch over to the new upstream
      self._Run(['remote', 'set-url', self.remote, url], options)
      if mirror:
        with open(os.path.join(
            self.checkout_path, '.git', 'objects', 'info', 'alternates'),
            'w') as fh:
          fh.write(os.path.join(url, 'objects'))
      self._EnsureValidHeadObjectOrCheckout(revision, options, url)
      self._FetchAndReset(revision, file_list, options)

      return_early = True
    else:
      self._EnsureValidHeadObjectOrCheckout(revision, options, url)

    if return_early:
      return self._Capture(['rev-parse', '--verify', 'HEAD'])

    cur_branch = self._GetCurrentBranch()

    # Cases:
    # 0) HEAD is detached. Probably from our initial clone.
    #   - make sure HEAD is contained by a named ref, then update.
    # Cases 1-4. HEAD is a branch.
    # 1) current branch is not tracking a remote branch
    #   - try to rebase onto the new hash or branch
    # 2) current branch is tracking a remote branch with local committed
    #    changes, but the DEPS file switched to point to a hash
    #   - rebase those changes on top of the hash
    # 3) current branch is tracking a remote branch w/or w/out changes, and
    #    no DEPS switch
    #   - see if we can FF, if not, prompt the user for rebase, merge, or stop
    # 4) current branch is tracking a remote branch, but DEPS switches to a
    #    different remote branch, and
    #   a) current branch has no local changes, and --force:
    #      - checkout new branch
    #   b) current branch has local changes, and --force and --reset:
    #      - checkout new branch
    #   c) otherwise exit

    # GetUpstreamBranch returns something like 'refs/remotes/origin/master' for
    # a tracking branch
    # or 'master' if not a tracking branch (it's based on a specific rev/hash)
    # or it returns None if it couldn't find an upstream
    if cur_branch is None:
      upstream_branch = None
      current_type = "detached"
      logging.debug("Detached HEAD")
    else:
      upstream_branch = scm.GIT.GetUpstreamBranch(self.checkout_path)
      if not upstream_branch or not upstream_branch.startswith('refs/remotes'):
        current_type = "hash"
        logging.debug("Current branch is not tracking an upstream (remote)"
                      " branch.")
      elif upstream_branch.startswith('refs/remotes'):
        current_type = "branch"
      else:
        raise gclient_utils.Error('Invalid Upstream: %s' % upstream_branch)

    if not scm.GIT.IsValidRevision(self.checkout_path, revision, sha_only=True):
      # Update the remotes first so we have all the refs.
      remote_output = scm.GIT.Capture(['remote'] + verbose + ['update'],
              cwd=self.checkout_path)
      if verbose:
        self.Print(remote_output)

    self._UpdateBranchHeads(options, fetch=True)

    revision = self._AutoFetchRef(options, revision)

    # This is a big hammer, debatable if it should even be here...
    if options.force or options.reset:
      target = 'HEAD'
      if options.upstream and upstream_branch:
        target = upstream_branch
      self._Scrub(target, options)

    if current_type == 'detached':
      # case 0
      # We just did a Scrub, this is as clean as it's going to get. In
      # particular if HEAD is a commit that contains two versions of the same
      # file on a case-insensitive filesystem (e.g. 'a' and 'A'), there's no way
      # to actually "Clean" the checkout; that commit is uncheckoutable on this
      # system. The best we can do is carry forward to the checkout step.
      if not (options.force or options.reset):
        self._CheckClean(revision)
      self._CheckDetachedHead(revision, options)
      if self._Capture(['rev-list', '-n', '1', 'HEAD']) == revision:
        self.Print('Up-to-date; skipping checkout.')
      else:
        # 'git checkout' may need to overwrite existing untracked files. Allow
        # it only when nuclear options are enabled.
        self._Checkout(
            options,
            revision,
            force=(options.force and options.delete_unversioned_trees),
            quiet=True,
        )
      if not printed_path:
        self.Print('_____ %s at %s' % (self.relpath, revision), timestamp=False)
    elif current_type == 'hash':
      # case 1
      # Can't find a merge-base since we don't know our upstream. That makes
      # this command VERY likely to produce a rebase failure. For now we
      # assume origin is our upstream since that's what the old behavior was.
      upstream_branch = self.remote
      if options.revision or deps_revision:
        upstream_branch = revision
      self._AttemptRebase(upstream_branch, file_list, options,
                          printed_path=printed_path, merge=options.merge)
      printed_path = True
    elif rev_type == 'hash':
      # case 2
      self._AttemptRebase(upstream_branch, file_list, options,
                          newbase=revision, printed_path=printed_path,
                          merge=options.merge)
      printed_path = True
    elif remote_ref and ''.join(remote_ref) != upstream_branch:
      # case 4
      new_base = ''.join(remote_ref)
      if not printed_path:
        self.Print('_____ %s at %s' % (self.relpath, revision), timestamp=False)
      switch_error = ("Could not switch upstream branch from %s to %s\n"
                     % (upstream_branch, new_base) +
                     "Please use --force or merge or rebase manually:\n" +
                     "cd %s; git rebase %s\n" % (self.checkout_path, new_base) +
                     "OR git checkout -b <some new branch> %s" % new_base)
      force_switch = False
      if options.force:
        try:
          self._CheckClean(revision)
          # case 4a
          force_switch = True
        except gclient_utils.Error as e:
          if options.reset:
            # case 4b
            force_switch = True
          else:
            switch_error = '%s\n%s' % (e.message, switch_error)
      if force_switch:
        self.Print("Switching upstream branch from %s to %s" %
                   (upstream_branch, new_base))
        switch_branch = 'gclient_' + remote_ref[1]
        self._Capture(['branch', '-f', switch_branch, new_base])
        self._Checkout(options, switch_branch, force=True, quiet=True)
      else:
        # case 4c
        raise gclient_utils.Error(switch_error)
    else:
      # case 3 - the default case
      rebase_files = self._Capture(
          ['diff', upstream_branch, '--name-only']).split()
      if verbose:
        self.Print('Trying fast-forward merge to branch : %s' % upstream_branch)
      try:
        merge_args = ['merge']
        if options.merge:
          merge_args.append('--ff')
        else:
          merge_args.append('--ff-only')
        merge_args.append(upstream_branch)
        merge_output = self._Capture(merge_args)
      except subprocess2.CalledProcessError as e:
        rebase_files = []
        if re.match('fatal: Not possible to fast-forward, aborting.', e.stderr):
          if not printed_path:
            self.Print('_____ %s at %s' % (self.relpath, revision),
                       timestamp=False)
            printed_path = True
          while True:
            if not options.auto_rebase:
              try:
                action = self._AskForData(
                    'Cannot %s, attempt to rebase? '
                    '(y)es / (q)uit / (s)kip : ' %
                        ('merge' if options.merge else 'fast-forward merge'),
                    options)
              except ValueError:
                raise gclient_utils.Error('Invalid Character')
            if options.auto_rebase or re.match(r'yes|y', action, re.I):
              self._AttemptRebase(upstream_branch, rebase_files, options,
                                  printed_path=printed_path, merge=False)
              printed_path = True
              break
            elif re.match(r'quit|q', action, re.I):
              raise gclient_utils.Error("Can't fast-forward, please merge or "
                                        "rebase manually.\n"
                                        "cd %s && git " % self.checkout_path
                                        + "rebase %s" % upstream_branch)
            elif re.match(r'skip|s', action, re.I):
              self.Print('Skipping %s' % self.relpath)
              return
            else:
              self.Print('Input not recognized')
        elif re.match("error: Your local changes to '.*' would be "
                      "overwritten by merge.  Aborting.\nPlease, commit your "
                      "changes or stash them before you can merge.\n",
                      e.stderr):
          if not printed_path:
            self.Print('_____ %s at %s' % (self.relpath, revision),
                       timestamp=False)
            printed_path = True
          raise gclient_utils.Error(e.stderr)
        else:
          # Some other problem happened with the merge
          logging.error("Error during fast-forward merge in %s!" % self.relpath)
          self.Print(e.stderr)
          raise
      else:
        # Fast-forward merge was successful
        if not re.match('Already up-to-date.', merge_output) or verbose:
          if not printed_path:
            self.Print('_____ %s at %s' % (self.relpath, revision),
                       timestamp=False)
            printed_path = True
          self.Print(merge_output.strip())
          if not verbose:
            # Make the output a little prettier. It's nice to have some
            # whitespace between projects when syncing.
            self.Print('')

      if file_list is not None:
        file_list.extend(
            [os.path.join(self.checkout_path, f) for f in rebase_files])

    # If the rebase generated a conflict, abort and ask user to fix
    if self._IsRebasing():
      raise gclient_utils.Error('\n____ %s at %s\n'
                                '\nConflict while rebasing this branch.\n'
                                'Fix the conflict and run gclient again.\n'
                                'See man git-rebase for details.\n'
                                % (self.relpath, revision))

    if verbose:
      self.Print('Checked out revision %s' % self.revinfo(options, (), None),
                 timestamp=False)

    # If --reset and --delete_unversioned_trees are specified, remove any
    # untracked directories.
    if options.reset and options.delete_unversioned_trees:
      # GIT.CaptureStatus() uses 'dit diff' to compare to a specific SHA1 (the
      # merge-base by default), so doesn't include untracked files. So we use
      # 'git ls-files --directory --others --exclude-standard' here directly.
      paths = scm.GIT.Capture(
          ['ls-files', '--directory', '--others', '--exclude-standard'],
          self.checkout_path)
      for path in (p for p in paths.splitlines() if p.endswith('/')):
        full_path = os.path.join(self.checkout_path, path)
        if not os.path.islink(full_path):
          self.Print('_____ removing unversioned directory %s' % path)
          gclient_utils.rmtree(full_path)

    return self._Capture(['rev-parse', '--verify', 'HEAD'])