Example #1
  def Main(self):
    """Main entry point."""
    file_tools.MakeDirectoryIfAbsent(self._options.source)
    file_tools.MakeDirectoryIfAbsent(self._options.output)
    log_tools.SetupLogging(self._options.verbose,
                           open(os.path.join(self._options.output,
                                             'toolchain_build.log'), 'w'))
    self.BuildAll()
Example #2
  def SyncAll(self):
    """Sync all packages selected and their dependencies."""
    file_tools.MakeDirectoryIfAbsent(self._options.source)
    for target in self._targets:
      # Only packages using git repos need to be synced.
      if 'git_url' in self._packages[target]:
        self.SyncGitRepo(target)
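
SyncAll only syncs targets whose package description carries a 'git_url' key. A minimal sketch of such an entry in self._packages, with a made-up package name and URL:

# Hypothetical entry in self._packages for a git-backed source package.
# Only entries with a 'git_url' key are synced by SyncAll.
packages = {
    'example_src': {
        'type': 'source',
        'git_url': 'https://example.com/example_src.git',
        'commands': [],
    },
}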
Example #3
  def __init__(self, cache_path, storage):
    """Init for this class.

    Args:
      cache_path: Path to a database to store a local cache in.
      storage: A GSDStorage style object to fallback to for reads.
    """
    self._cache_path = os.path.abspath(cache_path)
    file_tools.MakeDirectoryIfAbsent(self._cache_path)
    self._storage = storage
Example #4
def PopulateGitCache(cache_dir, url_list, logger=None):
    """Fetches a git repo that combines a list of git repos.

  This is an interface to the "git cache" command found within depot_tools.
  You can populate a cache directory then obtain the local cache url using
  GetGitCacheURL(). It is best to sync with the shared option so that the
  cloned repository shares the same git objects.

  Args:
    cache_dir: Local directory where git cache will be populated.
    url_list: List of URLs which cache_dir should be populated with.
  """
    if url_list:
        file_tools.MakeDirectoryIfAbsent(cache_dir)
        git = GitCmd()
        for url in url_list:
            log_tools.CheckCall(git + ['cache', 'populate', '-c', '.', url],
                                logger=logger,
                                cwd=cache_dir)
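
The docstring above describes the intended workflow: populate the cache directory first, then resolve the cache-backed URL with GetGitCacheURL() (shown in use in Example #7). A minimal usage sketch with placeholder paths and repository URLs:

# Hypothetical usage; the cache location and repo URLs are placeholders.
cache_dir = '/tmp/git_cache'
urls = ['https://example.com/foo.git',
        'https://example.com/bar.git']
PopulateGitCache(cache_dir, urls)
# Resolve the local, cache-backed URL for one of the repos.
local_url = GetGitCacheURL(cache_dir, urls[0])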
Example #5
  def BuildPackage(self, package):
    """Build a single package.

    Assumes dependencies of the package have been built.

    Args:
      package: Package to build.
    """

    package_info = self._packages[package]

    # Validate the package description.
    if 'type' not in package_info:
      raise Exception('package %s does not have a type' % package)
    type_text = package_info['type']
    if type_text not in ('source', 'build'):
      raise Exception('package %s has unrecognized type: %s' %
                      (package, type_text))
    is_source_target = type_text == 'source'

    if 'commands' not in package_info:
      raise Exception('package %s does not have any commands' % package)

    # Source targets do not run when skipping sync.
    if is_source_target and not self._options.sync_sources:
      logging.debug('Sync skipped: not running commands for %s' % package)
      return

    PrintFlush('@@@BUILD_STEP %s (%s)@@@' % (package, type_text))
    logging.debug('Building %s package %s' % (type_text, package))

    dependencies = package_info.get('dependencies', [])

    # Collect a dict of all the inputs.
    inputs = {}
    # Add in explicit inputs.
    if 'inputs' in package_info:
      for key, value in package_info['inputs'].iteritems():
        if key in dependencies:
          raise Exception('key "%s" found in both dependencies and inputs of '
                          'package "%s"' % (key, package))
        inputs[key] = value
    else:
      inputs['src'] = os.path.join(self._options.source, package)
    # Add in each dependency by package name.
    for dependency in dependencies:
      inputs[dependency] = self.GetOutputDir(dependency, True)

    # Each package generates intermediate files into output/<PACKAGE>_work.
    # Clobbered here explicitly.
    work_dir = os.path.join(self._options.output, package + '_work')
    if self._options.clobber:
      logging.debug('Clobbering working directory %s' % work_dir)
      file_tools.RemoveDirectoryIfPresent(work_dir)
    file_tools.MakeDirectoryIfAbsent(work_dir)

    output = self.GetOutputDir(package, False)
    output_subdir = self.GetOutputDir(package, True)

    if not is_source_target or self._options.clobber_source:
      logging.debug('Clobbering output directory %s' % output)
      file_tools.RemoveDirectoryIfPresent(output)
      os.makedirs(output_subdir)

    commands = package_info.get('commands', [])
    if not self._options.clobber and len(os.listdir(work_dir)) > 0:
      commands = [cmd for cmd in commands if
                  not (hasattr(cmd, 'skip_for_incremental') and
                       cmd.skip_for_incremental)]
    # Do it.
    self._build_once.Run(
        package, inputs, output,
        commands=commands,
        working_dir=work_dir,
        memoize=not is_source_target,
        signature_file=self._signature_file,
        subdir=output_subdir)

    if not is_source_target and self._options.install:
      file_tools.CopyTree(output, self._options.install)
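
BuildPackage dictates the shape of each entry in self._packages: a 'type' of either 'source' or 'build', a 'commands' list, and optional 'dependencies' and 'inputs' (whose keys must not collide with dependency names). A sketch of a 'build' package that would pass this validation, with made-up names and paths:

# Hypothetical package description of the shape BuildPackage expects.
packages = {
    'example_lib': {
        'type': 'build',
        'dependencies': ['example_src'],
        'inputs': {'scripts': 'toolchain_build/example_scripts'},
        'commands': [],  # command.Command objects would go here
    },
}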
Example #6
    def Run(self,
            package,
            inputs,
            output,
            commands,
            working_dir=None,
            memoize=True,
            signature_file=None,
            subdir=None):
        """Run an operation once, possibly hitting cache.

    Args:
      package: Name of the computation/module.
      inputs: A dict of names mapped to files that are inputs.
      output: An output directory.
      commands: A list of command.Command objects to run.
      working_dir: Working directory to use, or None for a temp dir.
      memoize: Boolean indicating the the result should be memoized.
      signature_file: File to write human readable build signatures to or None.
      subdir: If not None, use this directory instead of the output dir as the
              substituter's output path. Must be a subdirectory of output.
    """
        if working_dir is None:
            wdm = working_directory.TemporaryWorkingDirectory()
        else:
            wdm = working_directory.FixedWorkingDirectory(working_dir)

        file_tools.MakeDirectoryIfAbsent(output)

        nonpath_subst = {'package': package}

        with wdm as work_dir:
            # Compute the build signature with modified inputs.
            build_signature = self.BuildSignature(package,
                                                  inputs=inputs,
                                                  commands=commands)
            # Optionally write human readable version of signature.
            if signature_file:
                signature_file.write(
                    self.BuildSignature(package,
                                        inputs=inputs,
                                        commands=commands,
                                        hasher=HumanReadableSignature()))
                signature_file.flush()

            # We're done if it's in the cache.
            if (memoize and self.ReadMemoizedResultFromCache(
                    package, build_signature, output)):
                return

            if subdir:
                assert subdir.startswith(output)

            for command in commands:
                paths = inputs.copy()
                paths['output'] = subdir if subdir else output
                nonpath_subst['build_signature'] = build_signature
                subst = substituter.Substituter(work_dir, paths, nonpath_subst)
                command.Invoke(subst)

        if memoize:
            self.WriteResultToCache(package, build_signature, output)
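
Example #5 shows Run being driven through self._build_once. A direct call would look roughly like the sketch below, assuming once is the object that owns Run and cmds is a list of command.Command objects (both stand-ins, as is the signature file path):

# Hypothetical direct invocation; once, cmds, and the paths are stand-ins.
with open('out/toolchain_build.sig', 'w') as sig:
    once.Run('example_lib',
             inputs={'src': 'src/example_lib'},
             output='out/example_lib_install',
             commands=cmds,
             memoize=True,
             signature_file=sig)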
Example #7
def SyncGitRepo(url,
                destination,
                revision,
                reclone=False,
                pathspec=None,
                git_cache=None,
                push_url=None,
                logger=None):
    """Sync an individual git repo.

  Args:
  url: URL to sync
  destination: Directory to check out into.
  revision: Pinned revision to check out. If None, do not check out a
            pinned revision.
  reclone: If True, delete the destination directory and re-clone the repo.
  pathspec: If not None, add the path to the git checkout command, which
            causes it to just update the working tree without switching
            branches.
  git_cache: If set, assumes URL has been populated within the git cache
             directory specified and sets the fetch URL to be from the
             git_cache.
  """
    if logger is None:
        logger = log_tools.GetConsoleLogger()
    if reclone:
        logger.debug('Clobbering source directory %s' % destination)
        file_tools.RemoveDirectoryIfPresent(destination)

    if git_cache:
        git_cache_url = GetGitCacheURL(git_cache, url)
    else:
        git_cache_url = None

    # If the destination is a git repository, validate the tracked origin.
    git_dir = os.path.join(destination, '.git')
    if os.path.exists(git_dir):
        if not IsURLInRemoteRepoList(
                url, destination, include_fetch=True, include_push=False):
            # If the git cache URL is being tracked instead of the fetch URL, we
            # can safely redirect it to the fetch URL instead.
            if git_cache_url and IsURLInRemoteRepoList(git_cache_url,
                                                       destination,
                                                       include_fetch=True,
                                                       include_push=False):
                GitSetRemoteRepo(url,
                                 destination,
                                 push_url=push_url,
                                 logger=logger)
            else:
                logger.error('Git Repo (%s) does not track URL: %s',
                             destination, url)
                raise InvalidRepoException(url, 'Could not sync git repo: %s',
                                           destination)

        # Make sure the push URL is set correctly as well.
        if not IsURLInRemoteRepoList(
                push_url, destination, include_fetch=False,
                include_push=True):
            GitSetRemoteRepo(url, destination, push_url=push_url)

    git = GitCmd()
    if not os.path.exists(git_dir):
        logger.info('Cloning %s...' % url)

        file_tools.MakeDirectoryIfAbsent(destination)
        clone_args = ['clone', '-n']
        if git_cache_url:
            clone_args.extend(['--reference', git_cache_url])

        log_tools.CheckCall(git + clone_args + [url, '.'],
                            logger=logger,
                            cwd=destination)

        if url != push_url:
            GitSetRemoteRepo(url,
                             destination,
                             push_url=push_url,
                             logger=logger)

    # If a git cache URL is supplied, make sure it is setup as a git alternate.
    if git_cache_url:
        git_alternates = [git_cache_url]
    else:
        git_alternates = []

    GitSetRepoAlternates(destination,
                         git_alternates,
                         append=False,
                         logger=logger)

    if revision is not None:
        logger.info('Checking out pinned revision...')
        log_tools.CheckCall(git + ['fetch', '--all'],
                            logger=logger,
                            cwd=destination)
        path = [pathspec] if pathspec else []
        log_tools.CheckCall(git + ['checkout', revision] + path,
                            logger=logger,
                            cwd=destination)
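
A usage sketch tying this back to Example #4: sync a pinned revision of a repo through a previously populated cache directory. The URL, revision, and paths are placeholders:

# Hypothetical sync through the cache populated in the Example #4 sketch.
SyncGitRepo('https://example.com/foo.git',
            destination='src/foo',
            revision='0123456789abcdef0123456789abcdef01234567',
            git_cache='/tmp/git_cache')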
Example #8
  def BuildAll(self):
    """Build all packages selected and their dependencies."""
    file_tools.MakeDirectoryIfAbsent(self._options.output)
    for target in self._targets:
      self.BuildPackage(target)