def Main(self):
  """Main entry point."""
  if self._options.clobber:
    PrintFlush('@@@BUILD_STEP clobber@@@')
    file_tools.RemoveDirectoryIfPresent(self._options.source)
    file_tools.RemoveDirectoryIfPresent(self._options.output)
  self.SyncAll()
  self.BuildAll()

def GetDirectory(self, key, path):
  """Read a directory from storage.

  Clobbers anything at the destination currently.

  Args:
    key: Key to fetch from.
    path: Path of the directory to write.
  """
  file_tools.RemoveDirectoryIfPresent(path)
  os.mkdir(path)
  handle, tmp_tgz = tempfile.mkstemp(prefix='dirstore', suffix='.tmp.tgz')
  try:
    os.close(handle)
    url = self._storage.GetFile(key, tmp_tgz)
    if url is None:
      return None
    # Calling cygtar through subprocess as its cwd handling is not currently
    # usable.
    subprocess.check_call([sys.executable, CYGTAR_PATH,
                           '-x', '-z', '-f', os.path.abspath(tmp_tgz)],
                          cwd=os.path.abspath(path))
    return url
  finally:
    os.remove(tmp_tgz)

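# A minimal usage sketch for GetDirectory (illustrative only; the storage
# object, key, and path below are invented stand-ins). A None return signals
# a cache miss, so callers typically fall back to building the directory.
def _example_fetch_cached_dir(dir_storage):
  url = dir_storage.GetDirectory('toolchain/abc123.tgz', '/tmp/out_dir')
  if url is None:
    logging.info('Cache miss; building from scratch instead.')
  return url
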
def SyncGitRepo(self, package):
  """Sync the git repo for a package.

  Args:
    package: Package name to sync.
  """
  PrintFlush('@@@BUILD_STEP sync %s@@@' % package)
  package_info = self._packages[package]
  url = package_info['git_url']
  revision = package_info['git_revision']
  destination = os.path.join(self._options.source, package)
  logging.info('Syncing %s...' % package)
  if self._options.reclone:
    file_tools.RemoveDirectoryIfPresent(destination)
  if sys.platform == 'win32':
    # On Windows, we want to use the depot_tools version of git, which has
    # git.bat as an entry point. When running through the msys command
    # prompt, subprocess does not handle batch files, so explicitly invoke
    # cmd.exe to be sure we run the correct git in this case.
    git = ['cmd.exe', '/c', 'git.bat']
  else:
    git = ['git']
  if not os.path.exists(destination):
    logging.info('Cloning %s...' % package)
    log_tools.CheckCall(git + ['clone', '-n', url, destination])
  if self._options.pinned:
    logging.info('Checking out pinned revision...')
    log_tools.CheckCall(git + ['fetch', '--all'], cwd=destination)
    log_tools.CheckCall(git + ['checkout', '-f', revision], cwd=destination)
    log_tools.CheckCall(git + ['clean', '-dffx'], cwd=destination)
  logging.info('Done syncing %s.' % package)

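# Sketch of the package description this SyncGitRepo consumes, inferred from
# the lookups above; the URL and revision are invented placeholders.
# self._packages would map each package name to an entry like this, and with
# --pinned the named revision is force-checked-out after a fetch.
_EXAMPLE_PACKAGES = {
    'binutils': {
        'git_url': 'https://example.org/git/binutils.git',
        'git_revision': '0123456789abcdef0123456789abcdef01234567',
    },
}
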
def ValidateGitRepo(url, directory, clobber_mismatch=False, logger=None):
  """Validates that a git repository tracks a particular URL.

  Given a git directory, this function validates whether the git directory
  actually tracks the expected URL. If the directory does not exist, nothing
  is done.

  Args:
    url: URL to look for.
    directory: Directory to look for.
    clobber_mismatch: If True, delete invalid directories instead of raising
        an exception.
    logger: Optional logger; defaults to the console logger.
  """
  if logger is None:
    logger = log_tools.GetConsoleLogger()
  git_dir = os.path.join(directory, '.git')
  if os.path.exists(git_dir):
    try:
      if IsURLInRemoteRepoList(url, directory, include_fetch=True,
                               include_push=False):
        return
      logger.warn('Local git repo (%s) does not track url (%s)',
                  directory, url)
    except:
      logger.error('Invalid git repo: %s', directory)
    if not clobber_mismatch:
      raise InvalidRepoException(url, 'Invalid local git repo: %s', directory)
    else:
      logger.debug('Clobbering invalid git repo %s' % directory)
      file_tools.RemoveDirectoryIfPresent(directory)
  elif os.path.exists(directory) and len(os.listdir(directory)) != 0:
    if not clobber_mismatch:
      raise InvalidRepoException(url,
                                 'Invalid non-empty repository destination %s',
                                 directory)
    else:
      logger.debug('Clobbering intended repository destination: %s',
                   directory)
      file_tools.RemoveDirectoryIfPresent(directory)

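# Hedged usage sketch (not from the source). With clobber_mismatch=False a
# mismatched checkout raises InvalidRepoException; retrying with True deletes
# the directory so a later sync can re-clone it cleanly.
def _example_validate(url, directory):
  try:
    ValidateGitRepo(url, directory, clobber_mismatch=False)
  except InvalidRepoException:
    ValidateGitRepo(url, directory, clobber_mismatch=True)
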
def copyTree(subst, src, dst, exclude):
  """Copy a directory tree after path substitution, clobbering the destination.

  Entries named in |exclude| are skipped at the top level only.
  """
  src = subst.SubstituteAbsPaths(src)
  dst = subst.SubstituteAbsPaths(dst)

  def ignoreExcludes(dir, files):
    if dir == src:
      return exclude
    else:
      return []

  file_tools.RemoveDirectoryIfPresent(dst)
  shutil.copytree(src, dst, symlinks=True, ignore=ignoreExcludes)

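# Assumed example call (the %-style placeholders are a guess at what
# SubstituteAbsPaths expands; adjust to the real substitution syntax). Only
# top-level entries listed in exclude are skipped, because ignoreExcludes
# returns the exclusion list solely for the root directory.
#   copyTree(subst, '%(src)s/lib', '%(output)s/lib', exclude=['.svn'])
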
def BuildPackage(self, package):
  """Build a single package.

  Assumes dependencies of the package have been built.

  Args:
    package: Package to build.
  """
  PrintFlush('@@@BUILD_STEP build %s@@@' % package)
  package_info = self._packages[package]
  dependencies = package_info.get('dependencies', [])
  # Collect a dict of all the inputs.
  inputs = {}
  # Add in either a tar source or a git source.
  if 'tar_src' in package_info:
    inputs['src'] = os.path.join(ROOT_DIR, package_info['tar_src'])
  else:
    inputs['src'] = os.path.join(self._options.source, package)
  # Add in each dependency by package name.
  for dependency in dependencies:
    inputs[dependency] = os.path.join(self._options.output,
                                      dependency + '_install')
  # Each package generates intermediate output into output/<PACKAGE>_work.
  # Clobbered here explicitly.
  work_dir = os.path.join(self._options.output, package + '_work')
  file_tools.RemoveDirectoryIfPresent(work_dir)
  os.mkdir(work_dir)
  # Each package emits its output to output/<PACKAGE>_install.
  # Clobbered implicitly by Run().
  output = os.path.join(self._options.output, package + '_install')
  # A package may define an alternate set of inputs to be used for computing
  # the build signature. These are assumed to be in the working directory.
  hashed_inputs = package_info.get('hashed_inputs')
  if hashed_inputs is not None:
    for key, value in hashed_inputs.iteritems():
      hashed_inputs[key] = os.path.join(work_dir, value)
  # Do it.
  self._build_once.Run(
      package, inputs, output,
      commands=package_info.get('commands', []),
      unpack_commands=package_info.get('unpack_commands', []),
      hashed_inputs=hashed_inputs,
      working_dir=work_dir)

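# Invented example of a package description understood by the BuildPackage
# above; the field names come from its lookups, the values are placeholders.
_EXAMPLE_PACKAGE_INFO = {
    'tar_src': 'third_party/example/example-1.0.tar.gz',  # else git source
    'dependencies': ['gcc_libs'],
    'unpack_commands': [],   # command.Command objects run before hashing
    'commands': [],          # command.Command objects doing the build
    'hashed_inputs': None,   # optional alternate inputs for the signature
}
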
def WriteResultToCache(self, package, build_signature, output):
  """Cache a computed result by key.

  Also prints URLs when appropriate.

  Args:
    package: Package name (for tgz name).
    build_signature: The input hash of the computation.
    output: A path containing the output of the computation.
  """
  if not self._cache_results:
    return
  out_hash = hashing_tools.StableHashPath(output)
  try:
    output_key = self.KeyForOutput(package, out_hash)
    # Try to get an existing copy in a temporary directory.
    wd = working_directory.TemporaryWorkingDirectory()
    with wd as work_dir:
      temp_output = os.path.join(work_dir, 'out')
      url = self._directory_storage.GetDirectory(output_key, temp_output)
      if url is None:
        # Isn't present. Cache the computed result instead.
        url = self._directory_storage.PutDirectory(output, output_key)
        logging.info('Computed fresh result and cached it.')
      else:
        # Cached version is present. Replace the current output with that.
        file_tools.RemoveDirectoryIfPresent(output)
        shutil.move(temp_output, output)
        logging.info('Recomputed result matches cached value, '
                     'using cached value instead.')
    # Upload an entry mapping from computation input to output hash.
    self._storage.PutData(out_hash,
                          self.KeyForBuildSignature(build_signature))
    self.PrintDownloadURL(url)
  except gsd_storage.GSDStorageError:
    logging.info('Failed to cache result.')
    raise

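# Note on the cache layout implied above (names taken from the calls; the
# exact key formats are not shown here): the packed output directory is
# stored under KeyForOutput(package, out_hash), and a small entry stored
# under KeyForBuildSignature(build_signature) maps the computation's input
# hash to out_hash, letting a later lookup skip re-running the build.
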
def SyncGitRepo(url, destination, revision, reclone=False, clean=False,
                pathspec=None):
  """Sync an individual git repo.

  Args:
    url: URL to sync.
    destination: Directory to check out into.
    revision: Pinned revision to check out. If None, do not check out a
        pinned revision.
    reclone: If True, delete the destination directory and re-clone the repo.
    clean: If True, discard local changes and untracked files.
        Otherwise the checkout will fail if there are uncommitted changes.
    pathspec: If not None, add the path to the git checkout command, which
        causes it to just update the working tree without switching branches.
  """
  if reclone:
    logging.debug('Clobbering source directory %s' % destination)
    file_tools.RemoveDirectoryIfPresent(destination)
  git = GitCmd()
  if not os.path.exists(destination) or len(os.listdir(destination)) == 0:
    logging.info('Cloning %s...' % url)
    log_tools.CheckCall(git + ['clone', '-n', url, destination])
  if revision is not None:
    logging.info('Checking out pinned revision...')
    log_tools.CheckCall(git + ['fetch', '--all'], cwd=destination)
    checkout_flags = ['-f'] if clean else []
    path = [pathspec] if pathspec else []
    log_tools.CheckCall(git + ['checkout'] + checkout_flags + [revision] +
                        path, cwd=destination)
  if clean:
    log_tools.CheckCall(git + ['clean', '-dffx'], cwd=destination)

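# Hedged example call (URL, destination, and revision are placeholders).
# clean=True both forces the checkout and runs 'git clean -dffx' afterwards,
# so local modifications in the destination are discarded.
#   SyncGitRepo('https://example.org/git/llvm.git', 'src/llvm',
#               'a1b2c3d4e5f60718293a4b5c6d7e8f9012345678',
#               reclone=False, clean=True)
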
def GetDirectory(self, key, path, hasher=None):
  """Read a directory from storage.

  Clobbers anything at the destination currently.

  Args:
    key: Key to fetch from.
    path: Path of the directory to write.
    hasher: Optional hash function to apply to the fetched tarball.
  Returns:
    DirectoryStorageItem of the item retrieved, or None on errors.
  """
  if hasher is None:
    hasher = hashing_tools.HashFileContents
  file_tools.RemoveDirectoryIfPresent(path)
  os.mkdir(path)
  handle, tmp_tgz = tempfile.mkstemp(prefix='dirstore', suffix='.tmp.tgz')
  try:
    os.close(handle)
    url = self._storage.GetFile(key, tmp_tgz)
    if url is None:
      return None
    # Calling cygtar through subprocess as its cwd handling is not currently
    # usable.
    subprocess.check_call([sys.executable, CYGTAR_PATH,
                           '-x', '-z', '-f', os.path.abspath(tmp_tgz)],
                          cwd=os.path.abspath(path))
    name = posixpath.basename(key)
    hash_value = hasher(tmp_tgz)
    return DirectoryStorageItem(name, hash_value, url)
  finally:
    os.remove(tmp_tgz)

def BuildPackage(self, package):
  """Build a single package.

  Assumes dependencies of the package have been built.

  Args:
    package: Package to build.
  """
  package_info = self._packages[package]

  # Validate the package description.
  if 'type' not in package_info:
    raise Exception('package %s does not have a type' % package)
  type_text = package_info['type']
  if type_text not in ('source', 'build'):
    raise Exception('package %s has unrecognized type: %s' %
                    (package, type_text))
  is_source_target = type_text == 'source'
  if 'commands' not in package_info:
    raise Exception('package %s does not have any commands' % package)

  # Source targets do not run when skipping sync.
  if is_source_target and not self._options.sync_sources:
    logging.debug('Sync skipped: not running commands for %s' % package)
    return

  PrintFlush('@@@BUILD_STEP %s (%s)@@@' % (package, type_text))
  logging.debug('Building %s package %s' % (type_text, package))

  dependencies = package_info.get('dependencies', [])

  # Collect a dict of all the inputs.
  inputs = {}
  # Add in explicit inputs.
  if 'inputs' in package_info:
    for key, value in package_info['inputs'].iteritems():
      if key in dependencies:
        raise Exception('key "%s" found in both dependencies and inputs of '
                        'package "%s"' % (key, package))
      inputs[key] = value
  else:
    inputs['src'] = os.path.join(self._options.source, package)
  # Add in each dependency by package name.
  for dependency in dependencies:
    inputs[dependency] = self.GetOutputDir(dependency, True)

  # Each package generates intermediate output into output/<PACKAGE>_work.
  # Clobbered here explicitly.
  work_dir = os.path.join(self._options.output, package + '_work')
  if self._options.clobber:
    logging.debug('Clobbering working directory %s' % work_dir)
    file_tools.RemoveDirectoryIfPresent(work_dir)
  file_tools.MakeDirectoryIfAbsent(work_dir)

  output = self.GetOutputDir(package, False)
  output_subdir = self.GetOutputDir(package, True)

  if not is_source_target or self._options.clobber_source:
    logging.debug('Clobbering output directory %s' % output)
    file_tools.RemoveDirectoryIfPresent(output)
    os.makedirs(output_subdir)

  commands = package_info.get('commands', [])
  if not self._options.clobber and len(os.listdir(work_dir)) > 0:
    # On incremental builds, drop commands flagged as clobber-only.
    commands = [cmd for cmd in commands
                if not (hasattr(cmd, 'skip_for_incremental') and
                        cmd.skip_for_incremental)]

  # Do it.
  self._build_once.Run(
      package, inputs, output,
      commands=commands,
      working_dir=work_dir,
      memoize=not is_source_target,
      signature_file=self._signature_file,
      subdir=output_subdir)

  if not is_source_target and self._options.install:
    file_tools.CopyTree(output, self._options.install)

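# Invented example of the newer, validated package description: 'type' must
# be 'source' or 'build', and keys in 'inputs' may not collide with names in
# 'dependencies'. All values are placeholders.
_EXAMPLE_BUILD_PACKAGE_INFO = {
    'type': 'build',
    'dependencies': ['llvm_x86_64_linux'],
    'inputs': {'src': 'toolchain_build/src/binutils'},
    'commands': [],  # command.Command objects; ones marked
                     # skip_for_incremental are dropped on incremental builds
}
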
def remove(subst, path):
  # Delete the directory at the substituted path, if it exists.
  file_tools.RemoveDirectoryIfPresent(subst.SubstituteAbsPaths(path))

def Run(self, package, inputs, output, commands, unpack_commands=None,
        hashed_inputs=None, working_dir=None):
  """Run an operation once, possibly hitting cache.

  Args:
    package: Name of the computation/module.
    inputs: A dict of names mapped to files that are inputs.
    output: An output directory.
    commands: A list of command.Command objects to run.
    unpack_commands: A list of command.Command objects to run before
        computing the build hash, or None.
    hashed_inputs: An alternate dict of inputs to use for hashing and after
        the packing stage, or None.
    working_dir: Working directory to use, or None for a temp dir.
  """
  if working_dir is None:
    wdm = working_directory.TemporaryWorkingDirectory()
  else:
    wdm = working_directory.FixedWorkingDirectory(working_dir)

  # Cleanup destination.
  file_tools.RemoveDirectoryIfPresent(output)
  os.mkdir(output)

  with wdm as work_dir:
    # Optionally unpack before hashing.
    if unpack_commands is not None:
      for command in unpack_commands:
        command.Invoke(check_call=self._check_call, package=package,
                       cwd=work_dir, inputs=inputs, output=output)

    # Use an alternate input set from here on.
    if hashed_inputs is not None:
      inputs = hashed_inputs

    # Compute the build signature with modified inputs.
    build_signature = self.BuildSignature(package, inputs=inputs,
                                          commands=commands)

    # We're done if it's in the cache.
    if self.ReadMemoizedResultFromCache(package, build_signature, output):
      return

    for command in commands:
      command.Invoke(check_call=self._check_call, package=package,
                     cwd=work_dir, inputs=inputs, output=output,
                     build_signature=build_signature)

  self.WriteResultToCache(package, build_signature, output)

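# Minimal sketch of driving Run() (assumed caller; the object construction
# and command list live elsewhere in this codebase). Identical package,
# inputs, and commands hash to the same build signature, so a repeat call is
# served from cache without re-running the commands.
def _example_run(once_obj, commands):
  inputs = {'src': '/path/to/checkout'}  # placeholder input mapping
  once_obj.Run('example_package', inputs, '/path/to/out', commands)
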
def SyncGitRepo(url, destination, revision, reclone=False, pathspec=None,
                git_cache=None, push_url=None, logger=None):
  """Sync an individual git repo.

  Args:
    url: URL to sync.
    destination: Directory to check out into.
    revision: Pinned revision to check out. If None, do not check out a
        pinned revision.
    reclone: If True, delete the destination directory and re-clone the repo.
    pathspec: If not None, add the path to the git checkout command, which
        causes it to just update the working tree without switching branches.
    git_cache: If set, assumes the URL has been populated within the git
        cache directory specified and sets the fetch URL to be from the
        git_cache.
    push_url: If not None, the URL to set as the remote push URL.
    logger: Optional logger; defaults to the console logger.
  """
  if logger is None:
    logger = log_tools.GetConsoleLogger()
  if reclone:
    logger.debug('Clobbering source directory %s' % destination)
    file_tools.RemoveDirectoryIfPresent(destination)

  if git_cache:
    git_cache_url = GetGitCacheURL(git_cache, url)
  else:
    git_cache_url = None

  # If the destination is a git repository, validate the tracked origin.
  git_dir = os.path.join(destination, '.git')
  if os.path.exists(git_dir):
    if not IsURLInRemoteRepoList(url, destination, include_fetch=True,
                                 include_push=False):
      # If the git cache URL is being tracked instead of the fetch URL, we
      # can safely redirect it to the fetch URL instead.
      if git_cache_url and IsURLInRemoteRepoList(git_cache_url, destination,
                                                 include_fetch=True,
                                                 include_push=False):
        GitSetRemoteRepo(url, destination, push_url=push_url, logger=logger)
      else:
        logger.error('Git Repo (%s) does not track URL: %s',
                     destination, url)
        raise InvalidRepoException(url, 'Could not sync git repo: %s',
                                   destination)

    # Make sure the push URL is set correctly as well.
    if not IsURLInRemoteRepoList(push_url, destination,
                                 include_fetch=False, include_push=True):
      GitSetRemoteRepo(url, destination, push_url=push_url)

  git = GitCmd()
  if not os.path.exists(git_dir):
    logger.info('Cloning %s...' % url)
    file_tools.MakeDirectoryIfAbsent(destination)
    clone_args = ['clone', '-n']
    if git_cache_url:
      clone_args.extend(['--reference', git_cache_url])
    log_tools.CheckCall(git + clone_args + [url, '.'],
                        logger=logger, cwd=destination)
    if url != push_url:
      GitSetRemoteRepo(url, destination, push_url=push_url, logger=logger)

  # If a git cache URL is supplied, make sure it is set up as a git alternate.
  if git_cache_url:
    git_alternates = [git_cache_url]
  else:
    git_alternates = []
  GitSetRepoAlternates(destination, git_alternates, append=False,
                       logger=logger)

  if revision is not None:
    logger.info('Checking out pinned revision...')
    log_tools.CheckCall(git + ['fetch', '--all'], logger=logger,
                        cwd=destination)
    path = [pathspec] if pathspec else []
    log_tools.CheckCall(git + ['checkout', revision] + path,
                        logger=logger, cwd=destination)

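# Hedged example (not from the source): syncing through a pre-populated git
# cache directory. GetGitCacheURL is the helper used above; all paths and
# URLs here are placeholders.
#   SyncGitRepo('https://example.org/git/clang.git',
#               destination='src/clang',
#               revision='f00dfeedf00dfeedf00dfeedf00dfeedf00dfeed',
#               git_cache='/b/git_cache',
#               push_url='ssh://example.org/git/clang.git')
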