def _build_config(self) -> None:
    """Configure with CMake, build the requested Ninja targets, and install."""
    if self.remove_cmake_cache:
        self._rm_cmake_cache(self.output_dir)
    if self.remove_install_dir and self.install_dir.exists():
        shutil.rmtree(self.install_dir)

    # Assemble the configure command: generator, -D defines, then source dir.
    defines = [f'-D{key}={val}' for key, val in self.cmake_defines.items()]
    configure_cmd: List[str] = (
        [str(paths.CMAKE_BIN_PATH), '-G', 'Ninja'] + defines + [str(self.src_dir)]
    )

    self.output_dir.mkdir(parents=True, exist_ok=True)
    env = self.env
    # Record the exact invocation so the configure step can be reproduced by hand.
    utils.create_script(self.output_dir / 'cmake_invocation.sh', configure_cmd, env)
    utils.check_call(configure_cmd, cwd=self.output_dir, env=env)

    build_cmd: List[str] = [str(paths.NINJA_BIN_PATH), *self.ninja_targets]
    utils.check_call(build_cmd, cwd=self.output_dir, env=env)
    self.install_config()
def _build_config(self) -> None:
    """Configure with autotools, run make, and install this config."""
    logger().info('Building %s for %s', self.name, self._config)
    if self.remove_install_dir and self.install_dir.exists():
        shutil.rmtree(self.install_dir)
    self.output_dir.mkdir(parents=True, exist_ok=True)
    self._touch_autoconfig_files()

    env = self.env
    # Fold per-config and per-builder flags into CC/CXX so configure-generated
    # Makefiles pick them up on every compile and link line.
    link_flags = self._config.ldflags + self.ldflags
    c_words = [str(self.toolchain.cc)] + self._config.cflags + self.cflags + link_flags
    cxx_words = [str(self.toolchain.cxx)] + self._config.cxxflags + self.cxxflags + link_flags
    env['CC'] = ' '.join(c_words)
    env['CXX'] = ' '.join(cxx_words)

    configure_cmd = [self.src_dir / 'configure', f'--prefix={self.install_dir}']
    configure_cmd.extend(self.config_flags)
    utils.check_call(configure_cmd, cwd=self.output_dir, env=env)

    # NOTE(review): make runs without the modified env — presumably configure
    # already baked CC/CXX into the Makefiles; confirm.
    utils.check_call(['make', f'-j{multiprocessing.cpu_count()}'],
                     cwd=self.output_dir)
    self.install()
def fetch_artifact(branch, target, build, pattern):
    """Fetch build artifacts matching `pattern` via the internal fetch_artifact tool."""
    tool = '/google/data/ro/projects/android/fetch_artifact'
    utils.check_call([
        tool,
        f'--branch={branch}',
        f'--target={target}',
        f'--bid={build}',
        pattern,
    ])
def _command_jenkinsrun(args):
    '''Run task as part of a jenkins job.'''
    parser = _make_task_argparser('jenkinsrun')
    parser.add_argument('-s', '--submit', action='store_true', default=False,
                        help='Submit results to artifact storage at end of task')
    argconfig = parser.parse_args(args)
    argconfig.existing = True
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-jenkinsrun-build', buildconfig,
                                         keep_buildconfig=True)
    if argconfig.submit:
        build += _mk_submit_results(buildname)
    with open("run.sh", "w") as runfile:
        runfile.write(build)
    retcode = utils.call(['/bin/sh', 'run.sh'])
    if retcode == 0:
        return
    # Build failed: report, run the repro helper when present, and exit
    # with the build's return code.
    sys.stdout.write("*Build failed!* (return code %s)\n" % retcode)
    sys.stdout.flush()
    repro_script = os.path.join(os.path.dirname(buildconfig.taskfilename),
                                'repro_message.sh')
    if os.access(repro_script, os.X_OK):
        utils.check_call([repro_script, _userdir, buildconfig.taskfilename])
    sys.exit(retcode)
def _command_jenkinsrun(args):
    '''Run task as part of a jenkins job.'''
    argp = _make_task_argparser('jenkinsrun')
    argp.add_argument('-s', '--submit', action='store_true', default=False,
                      help='Submit results to artifact storage at end of task')
    argconfig = argp.parse_args(args)
    argconfig.existing = True
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-jenkinsrun-build', buildconfig,
                                         keep_buildconfig=True)
    if argconfig.submit:
        build += _mk_submit_results(buildname)
    with open("run.sh", "w") as script:
        script.write(build)
    retcode = utils.call(['/bin/sh', 'run.sh'])
    if retcode == 0:
        return
    # Failure path: print the diagnostic, optionally run the repro helper,
    # then propagate the build's return code as the process exit status.
    sys.stdout.write("*Build failed!* (return code %s)\n" % retcode)
    sys.stdout.flush()
    taskdir = os.path.dirname(buildconfig.taskfilename)
    repro_script = os.path.join(taskdir, 'repro_message.sh')
    if os.access(repro_script, os.X_OK):
        utils.check_call([repro_script, _userdir, buildconfig.taskfilename])
    sys.exit(retcode)
def _check_revision_exists(self, revision, git_project_path):
    """Checks whether a revision is found in a git project of current tree.

    Args:
        revision: string, revision value recorded in manifest.xml
        git_project_path: string, path relative to ANDROID_BUILD_TOP
    """
    path = utils.join_realpath(self._android_build_top, git_project_path)

    def _check_rev_list(revision):
        """Checks whether revision is reachable from HEAD of git project."""
        logging.info('Checking if revision {rev} exists in {proj}'.format(
            rev=revision, proj=git_project_path))
        try:
            # rev-list HEAD..<rev> lists commits in <rev> that HEAD lacks;
            # empty output therefore means <rev> is reachable from HEAD.
            cmd = [
                'git', '-C', path, 'rev-list', 'HEAD..{}'.format(revision)
            ]
            output = utils.check_output(cmd).strip()
        except subprocess.CalledProcessError as error:
            logging.error('Error: {}'.format(error))
            return False
        else:
            if output:
                logging.debug(
                    '{proj} does not have the following revisions: {rev}'.
                    format(proj=git_project_path, rev=output))
                return False
            else:
                logging.info(
                    'Found revision {rev} in project {proj}'.format(
                        rev=revision, proj=git_project_path))
                return True

    if not _check_rev_list(revision):
        # VNDK snapshots built from a *-release branch will have merge
        # CLs in the manifest because the *-dev branch is merged to the
        # *-release branch periodically. In order to extract the
        # revision relevant to the source of the git_project_path,
        # we fetch the *-release branch and get the revision of the
        # parent commit with FETCH_HEAD^2.
        logging.info(
            'Checking if the parent of revision {rev} exists in {proj}'.
            format(rev=revision, proj=git_project_path))
        try:
            cmd = ['git', '-C', path, 'fetch', 'goog', revision]
            utils.check_call(cmd)
            # FETCH_HEAD^2 is the second parent of the fetched merge commit,
            # i.e. the side merged in from the other branch.
            cmd = ['git', '-C', path, 'rev-parse', 'FETCH_HEAD^2']
            parent_revision = utils.check_output(cmd).strip()
        except subprocess.CalledProcessError as error:
            logging.error(
                'Failed to get parent of revision {rev}: {err}'.format(
                    rev=revision, err=error))
            raise
        else:
            if not _check_rev_list(parent_revision):
                return False
    return True
def _get_artifact_from_url(config, dest_dir):
    """Download and unpack the artifact tarball described by `config` into dest_dir.

    Args:
        config: mapping with a 'url' key (optionally 'name' for diagnostics).
        dest_dir: directory to create and extract into.

    Exits the process with status 1 when the URL is missing.
    """
    url = config.get('url')
    if url is None:
        # Bug fix: `name` was previously an undefined variable here, so this
        # path raised NameError instead of printing the intended diagnostic.
        name = config.get('name', '<unnamed artifact>')
        sys.stderr.write("Missing URL for '%s'\n" % name)
        sys.exit(1)
    tar_cmd = "cd %s ; curl -s %s | tar -x" % (quote(dest_dir), quote(url))
    utils.check_call(['mkdir', '-p', dest_dir])
    # NOTE(review): with a shell pipeline, a curl failure is masked by tar's
    # exit status — consider `set -o pipefail` if curl errors must propagate.
    utils.check_call(tar_cmd, shell=True)
def compile_python(self):
    """Byte-compile all Python files in the tree to .pyc and .pyo.

    The distributed .pyc files may be of limited value, but this matches
    what distutils and Fedora RPM packaging do.
    """
    compiler = os.path.join(self.topdir, 'tools', 'compiledir.py')
    # One pass without -O (.pyc) and one with -O (.pyo).
    for extra_flags in ([], ["-O"]):
        check_call(['python'] + extra_flags + [compiler, self.treedir])
def commit(branch, build, version):
    """Commit the staged snapshot update with a standard VNDK message."""
    logging.info('Making commit...')
    utils.check_call(['git', 'add', '.'])
    message = (
        f'Update VNDK snapshot v{version} to build {build}.\n'
        f'Taken from branch {branch}.'
    )
    utils.check_call(['git', 'commit', '-m', message])
def uploadCL(revision, version, changeId=None):
    """Upload switchover CL with provided parameters.

    If changeId is not none, any existing CL with that ChangeId gets updated.
    """
    branch = f'clang-prebuilt-{revision}'
    message = (f'[DO NOT SUBMIT] Switch to clang {revision} ' +
               f'{version}.\n\n' +
               'For testing\n' +
               'Test: N/A\n')
    if changeId is not None:
        # Reusing the Change-Id makes gerrit treat the upload as a new
        # patchset of the existing CL rather than a brand new change.
        message += (f'\nChange-Id: {changeId}\n')
    # Random hashtag used below to discover the uploaded CL's number.
    hashtag = 'chk-' + ''.join(random.sample(string.digits, 8))

    @contextlib.contextmanager
    def chdir_context(directory):
        # Temporarily switch cwd; always restore the previous directory.
        prev_dir = os.getcwd()
        try:
            os.chdir(directory)
            yield
        finally:
            os.chdir(prev_dir)

    # Create change:
    # - repo start
    # - update clang version in soong
    # - git commit
    with chdir_context(test_paths.ANDROID_DIR / 'build' / 'soong'):
        utils.unchecked_call(['repo', 'abandon', branch, '.'])
        utils.check_call(['repo', 'sync', '-c', '.'])
        utils.check_call(['repo', 'start', branch, '.'])
        soong_filepath = 'cc/config/global.go'
        SoongCL._switch_clang_version(soong_filepath, revision, version)
        utils.check_call(['git', 'add', soong_filepath])
        utils.check_call(['git', 'commit', '-m', message])
        utils.check_call([
            'repo', 'upload', '.', '--current-branch',
            '--yes',  # Answer yes to all safe prompts
            '--verify',  # Run upload hooks without prompting.
            '--wip',  # work in progress
            '--label=Code-Review-2',  # code-review -2
            f'--hashtag={hashtag}',
        ])
    json_output = gerrit_query_change(f'hashtag:{hashtag}')
    if len(json_output) != 1:
        raise RuntimeError('Upload failed; or hashtag not unique. ' +
                           f'Gerrit query returned {json_output}')
    return SoongCL.getExistingCL(str(json_output[0]['_number']),
                                 revision, version,
                                 try_resolve_conflict=False)
def commit(branch, build, version):
    """Commit the staged snapshot update, logging through the module logger."""
    logger.info('Making commit...')
    utils.check_call(['git', 'add', '.'], logger)
    message = (
        f'Update VNDK snapshot v{version} to build {build}.\n'
        f'Taken from branch {branch}.'
    )
    utils.check_call(['git', 'commit', '-m', message], logger)
def extract_profdata() -> Optional[Path]:
    """Extract the PGO profdata tarball into OUT_DIR and return the file path.

    Returns None when no profdata tarball is configured.
    Raises RuntimeError when extraction does not produce the expected file.
    """
    tarball = paths.pgo_profdata_tar()
    if not tarball:
        return None
    utils.check_call(['tar', '-jxC', str(paths.OUT_DIR), '-f', str(tarball)])
    extracted = paths.OUT_DIR / paths.pgo_profdata_filename()
    if extracted.exists():
        return extracted
    raise RuntimeError(
        f'Failed to extract profdata from {tarball} to {paths.OUT_DIR}')
def install(self) -> None:
    """Install, then rewrite the dylib install-name on Darwin.

    Updates LC_ID_DYLIB so consumers of libedit link against an
    @rpath-relative name instead of an absolute build path.
    """
    super().install()
    if not self._config.target_os.is_darwin:
        return
    libedit_path = paths.get_libedit_lib(self.install_dir,
                                         self._config.target_os)
    utils.check_call([
        'install_name_tool',
        '-id', f'@rpath/{libedit_path.name}',
        str(libedit_path),
    ])
def create_cl(new_patches: PatchList):
    """Stage the new patch files plus PATCHES.json and create a git commit."""
    rel_files = [p.rel_patch_path for p in new_patches] + ['PATCHES.json']
    staged = [str(paths.SCRIPTS_DIR / 'patches' / f) for f in rel_files]
    check_call(['git', 'add'] + staged)

    # One summary line per patch: short sha + subject (upstream tag stripped).
    prefix = '[UPSTREAM] '
    lines = ['[patches] Cherry pick CLs from upstream', '']
    for patch in new_patches:
        subject = patch.comment
        if subject.startswith(prefix):
            subject = subject[len(prefix):]
        lines.append(patch.sha[:11] + ' ' + subject)
    lines += ['', 'Test: N/A']
    check_call(['git', 'commit', '-m', '\n'.join(lines)])
def _command_try(args):
    '''Execute task locally.'''
    parser = _make_task_argparser('try')
    argconfig = parser.parse_args(args)
    argconfig.local = True
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-try-build', buildconfig)
    # delete=False: the hook process opens the script by name after we close it.
    with tempfile.NamedTemporaryFile(delete=False) as tempf:
        tempf.write(build)
        tempf.close()
        utils.check_call(['./exec-try-build', _userdir, tempf.name, buildname],
                         cwd=_hooksdir)
        os.unlink(tempf.name)
def _command_try(args):
    '''Execute task locally.'''
    argp = _make_task_argparser('try')
    argconfig = argp.parse_args(args)
    argconfig.local = True
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-try-build', buildconfig)
    # Write the generated build script to a named temp file so the hook can
    # open it by path; delete=False because it is unlinked manually below.
    with tempfile.NamedTemporaryFile(delete=False) as script:
        script.write(build)
        script.close()
        utils.check_call(['./exec-try-build', _userdir, script.name, buildname],
                         cwd=_hooksdir)
        os.unlink(script.name)
def main():
    """Trim stale patch entries from PATCHES.json and commit the removal."""

    def _resolve_patch(basename):
        # A basename printed by patch_manager.py is either a cherry-pick
        # (patches/cherry/<PATCH>) or a local patch (patches/<PATCH>).
        for candidate in (os.path.join(_PATCH_DIR, 'cherry', basename),
                          os.path.join(_PATCH_DIR, basename)):
            if os.path.exists(candidate):
                return candidate
        raise RuntimeError(f'Cannot find patch file {basename}')

    removed_patches = trim_patches_json()
    if not removed_patches:
        print('No patches to remove')
        return
    paths_to_remove = [_resolve_patch(p) for p in removed_patches]

    # Stage the trimmed PATCHES.json plus the deleted patch files and commit.
    os.chdir(_LLVM_ANDROID_PATH)
    branch_name = f'trim-patches-before-{_SVN_REVISION}'
    utils.unchecked_call(['repo', 'abandon', branch_name, '.'])
    utils.check_call(['repo', 'start', branch_name, '.'])
    utils.check_call(['git', 'add', _PATCH_JSON])
    for patch_path in paths_to_remove:
        utils.check_call(['git', 'rm', patch_path])
    message = '\n'.join([
        f'Remove patch entries older than {_SVN_REVISION}.',
        '',
        'Test: N/A',
    ])
    utils.check_call(['git', 'commit', '-m', message])
def _command_submit(args):
    '''Submit task to jenkins OneOff job.'''
    parser = _make_task_argparser('submit')
    argconfig = parser.parse_args(args)
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-submit-build', buildconfig)
    build += _mk_submit_results(buildname)
    # delete=False: the submit hook opens the script by name after we close it.
    with tempfile.NamedTemporaryFile(delete=False) as tempf:
        tempf.write(build)
        tempf.close()
        utils.check_call(['./submit', _userdir, tempf.name, buildname],
                         cwd=_hooksdir)
        os.unlink(tempf.name)
def compile_python(self):
    """Byte-compile all Python files in the tree to .pyc and .pyo.

    Of debatable value for distribution, but consistent with distutils and
    Fedora RPM packaging behavior.
    """
    script = os.path.join(self.topdir, 'tools', 'compiledir.py')
    # First pass produces .pyc, second (-O) produces .pyo.
    check_call(['python', script, self.treedir])
    check_call(['python', "-O", script, self.treedir])
def _command_submit(args):
    '''Submit task to jenkins OneOff job.'''
    argp = _make_task_argparser('submit')
    argconfig = argp.parse_args(args)
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-submit-build', buildconfig)
    build += _mk_submit_results(buildname)
    # Persist the generated script to a named temp file for the submit hook;
    # unlinked manually after the hook finishes.
    with tempfile.NamedTemporaryFile(delete=False) as script:
        script.write(build)
        script.close()
        utils.check_call(['./submit', _userdir, script.name, buildname],
                         cwd=_hooksdir)
        os.unlink(script.name)
def update_lib_id(self) -> None:
    """Util function to update lib paths on mac."""
    if self.static_lib or not self._config.target_os.is_darwin:
        return
    for lib in self.link_libraries:
        # Point LC_ID_DYLIB at @rpath so users of the library won't link
        # with an absolute path.
        utils.check_call(['install_name_tool', '-id',
                          f'@rpath/{lib.name}', str(lib)])
        # This lib may already reference sibling libs; rewrite those
        # references to @rpath as well.
        for sibling in self.link_libraries:
            utils.check_call(['install_name_tool', '-change', str(sibling),
                              f'@rpath/{sibling.name}', str(lib)])
def _command_sshrun(args):
    '''Run task by logging into a remote machine with ssh.'''
    parser = _make_task_argparser('sshrun', hostname_arg=True)
    argconfig = parser.parse_args(args)
    argconfig.local = True
    argconfig.rewrite_local = True
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-sshrun-build', buildconfig)
    run_file = tempfile.NamedTemporaryFile(prefix=buildname, delete=False)
    with run_file:
        run_file.write(build)
        run_file.close()
    try:
        # Hand the generated script to the ssh hook for remote execution.
        utils.check_call(['./sshrun', argconfig.hostname, run_file.name],
                         cwd=_hooksdir)
    finally:
        os.unlink(run_file.name)
def _command_sshrun(args):
    '''Run task by logging into a remote machine with ssh.'''
    argp = _make_task_argparser('sshrun', hostname_arg=True)
    argconfig = argp.parse_args(args)
    argconfig.local = True
    argconfig.rewrite_local = True
    buildconfig = _make_buildconfig(argconfig)
    build, buildname = _make_buildscript('./mk-sshrun-build', buildconfig)
    # Write the script to a named temp file (delete=False so the hook can
    # open it by path); always unlink it, even if the hook fails.
    script = tempfile.NamedTemporaryFile(prefix=buildname, delete=False)
    with script:
        script.write(build)
        script.close()
    try:
        utils.check_call(['./sshrun', argconfig.hostname, script.name],
                         cwd=_hooksdir)
    finally:
        os.unlink(script.name)
def mergeProfiles(self):
    """Merge raw PGO profiles with llvm-profdata, then tar the result into DIST_DIR."""
    profdata_tool = paths.OUT_DIR / 'stage1-install' / 'bin' / 'llvm-profdata'
    profdata_filename = paths.pgo_profdata_filename()
    merged = paths.OUT_DIR / profdata_filename
    utils.check_call([str(profdata_tool), 'merge',
                      '-o', str(merged),
                      str(self.profiles_dir)])
    # Package the merged profile; DIST_DIR falls back to OUT_DIR when unset.
    dist_dir = Path(os.environ.get('DIST_DIR', paths.OUT_DIR))
    utils.check_call(['tar', '-cjC', str(paths.OUT_DIR), profdata_filename,
                      '-f', str(dist_dir / paths.pgo_profdata_tarname())])
def main():
    """Entry point: build or locate a clang toolchain, link it into the
    Android tree, then either build the given targets or run device tests.
    """
    logging.basicConfig(level=logging.DEBUG)
    args = parse_args()
    # Resolve a clang install: explicit path > packaged archive > fresh build.
    if args.clang_path is not None:
        clang_path = Path(args.clang_path)
    elif args.clang_package_path is not None:
        clang_path = extract_packaged_clang(Path(args.clang_package_path))
    else:
        cmd = [paths.SCRIPTS_DIR / 'build.py', '--no-build=windows,lldb']
        if args.profile:
            # Instrumented build so PGO profiles can be collected below.
            cmd.append('--build-instrumented')
        utils.check_call(cmd)
        clang_path = paths.get_package_install_path(hosts.build_host(),
                                                    'clang-dev')
    clang_version = extract_clang_version(clang_path)
    link_clang(Path(args.android_path), clang_path)

    if args.build_only:
        profiler = ClangProfileHandler() if args.profile else None
        # A single explicit --target overrides the default target list.
        targets = [args.target] if args.target else TARGETS
        for target in targets:
            build_target(Path(args.android_path), clang_version, target,
                         args.jobs, args.redirect_stderr, args.with_tidy,
                         profiler)
        if profiler is not None:
            invoke_llvm_tools(profiler)
            profiler.mergeProfiles()
    else:
        devices = get_connected_device_list()
        if len(devices) == 0:
            print("You don't have any devices connected.")
        for device in devices:
            result = test_device(
                Path(args.android_path), clang_version, device, args.jobs,
                args.clean_built_target,
                Path(args.flashall_path) if args.flashall_path else None,
                args.redirect_stderr, args.with_tidy)
            # Stop at the first failing device unless --keep-going was given.
            if not result and not args.keep_going:
                break
def package(build_name: str) -> None:
    """Merge all generated kzip files into a single <build_name>.kzip in DIST_DIR."""
    # Build merge_kzips using soong
    utils.check_call([
        'build/soong/soong_ui.bash', '--build-mode', '--all-modules',
        f'--dir={paths.ANDROID_DIR}', '-k', 'merge_zips'
    ])
    merge_zips_path = (paths.OUT_DIR / 'soong' / 'host' /
                       hosts.build_host().os_tag / 'bin' / 'merge_zips')
    # Call: merge_zips $DIST_DIR/<build_name>.kzip <kzip files>
    output = os.path.join(utils.ORIG_ENV.get('DIST_DIR', paths.OUT_DIR),
                          build_name + '.kzip')
    kythe_out_dir = paths.KYTHE_OUTPUT_DIR
    kzips = [os.path.join(kythe_out_dir, entry)
             for entry in os.listdir(kythe_out_dir)]
    utils.check_call([str(merge_zips_path), output, *kzips])
def install_wrappers(llvm_install_path: Path, llvm_next=False) -> None:
    """Build the compiler wrapper and install it over clang/clang++/clang-tidy.

    The real binaries are renamed to *.real and the wrapper (plus
    bisect_driver.py) is copied into the install's bin/ directory.

    Args:
        llvm_install_path: root of the clang install to modify.
        llvm_next: passed through to the wrapper build as --use_llvm_next.
    """
    wrapper_path = paths.OUT_DIR / 'llvm_android_wrapper'
    wrapper_build_script = paths.TOOLCHAIN_UTILS_DIR / 'compiler_wrapper' / 'build.py'
    # Note: The build script automatically determines the architecture
    # based on the host.
    go_env = dict(os.environ)
    go_env['PATH'] = str(paths.GO_BIN_PATH) + os.pathsep + go_env['PATH']
    utils.check_call([
        sys.executable, wrapper_build_script,
        '--config=android',
        '--use_ccache=false',
        '--use_llvm_next=' + str(llvm_next).lower(),
        f'--output_file={wrapper_path}'
    ], env=go_env)

    bisect_path = paths.SCRIPTS_DIR / 'bisect_driver.py'
    bin_path = llvm_install_path / 'bin'
    clang_path = bin_path / 'clang'
    clang_real_path = bin_path / 'clang.real'
    clangxx_path = bin_path / 'clang++'
    clangxx_real_path = bin_path / 'clang++.real'
    clang_tidy_path = bin_path / 'clang-tidy'
    clang_tidy_real_path = bin_path / 'clang-tidy.real'

    # Rename clang and clang++ to clang.real and clang++.real.
    # clang and clang-tidy may already be moved by this script if we use a
    # prebuilt clang. So we only move them if clang.real and clang-tidy.real
    # doesn't exist.
    if not clang_real_path.exists():
        clang_path.rename(clang_real_path)
    clang_tidy_real_path = clang_tidy_path.parent / (clang_tidy_path.name + '.real')
    if not clang_tidy_real_path.exists():
        clang_tidy_path.rename(clang_tidy_real_path)

    # Clear any stale entry points, then relink: clang++.real is a relative
    # symlink to clang.real, and the wrapper becomes clang/clang++/clang-tidy.
    clang_path.unlink(missing_ok=True)
    clangxx_path.unlink(missing_ok=True)
    clang_tidy_path.unlink(missing_ok=True)
    clangxx_real_path.unlink(missing_ok=True)
    clangxx_real_path.symlink_to('clang.real')
    shutil.copy2(wrapper_path, clang_path)
    shutil.copy2(wrapper_path, clangxx_path)
    shutil.copy2(wrapper_path, clang_tidy_path)
    shutil.copy2(bisect_path, bin_path)
def merge_projects(sha, revision, create_new_branch, dry_run):
    """Merge upstream LLVM commit `sha` (svn r`revision`) into the toolchain repo."""
    path = paths.TOOLCHAIN_LLVM_PATH
    if not dry_run:
        # Only touch the working tree when actually performing the merge.
        sync_branch(path)
        fetch_upstream(path)
    print('Project llvm-project svn: %d sha: %s' % (revision, sha))
    if create_new_branch:
        utils.check_call(['repo', 'start', 'merge-upstream-r%d' % revision, '.'],
                         cwd=path, dry_run=dry_run)
    # Merge upstream revision
    merge_msg = 'Merge %s for LLVM update to %d' % (sha, revision)
    utils.check_call(['git', 'merge', '--quiet', sha, '-m', merge_msg],
                     cwd=path, dry_run=dry_run)
def getNewCL(build_number, branch):
    """Upload prebuilts from a particular build number."""
    logging.info(f'Uploading prebuilts CL for build {build_number}')
    # Tag the upload with a random hashtag so the CL number can be queried back.
    hashtag = 'chk-' + ''.join(random.sample(string.digits, 8))
    utils.check_call([
        str(test_paths.LLVM_ANDROID_DIR / 'update-prebuilts.py'),
        f'--branch={branch}',
        '--overwrite',
        '--host=linux-x86',
        '--repo-upload',
        f'--hashtag={hashtag}',
        build_number,
    ])
    matches = gerrit_query_change(f'hashtag:{hashtag}')
    if len(matches) != 1:
        raise RuntimeError('Upload failed; or hashtag not unique. ' +
                           f'Gerrit query returned {matches}')
    return PrebuiltCL.getExistingCL(str(matches[0]['_number']))
def generate_patch_files(sha_list: List[str], start_version: int) -> PatchList:
    """Generate upstream cherry-pick patch files.

    For each sha, writes patches/cherry/<sha>.patch via `git format-patch`
    and records a PatchItem covering [start_version, revision-of-sha].

    Raises AssertionError if a patch file already exists.
    """
    upstream_dir = paths.TOOLCHAIN_LLVM_PATH
    fetch_upstream_once()
    result = PatchList()
    for sha in sha_list:
        # Normalize abbreviated shas to the full 40-character form.
        if len(sha) < 40:
            sha = get_full_sha(upstream_dir, sha)
        file_path = paths.SCRIPTS_DIR / 'patches' / 'cherry' / f'{sha}.patch'
        assert not file_path.exists(), f'{file_path} already exists'
        with open(file_path, 'w') as fh:
            # Use argument lists instead of shell=True strings: avoids shell
            # quoting/injection issues (subprocess best practice).
            check_call(['git', 'format-patch', '-1', sha, '--stdout'],
                       stdout=fh, cwd=upstream_dir)
        commit_subject = check_output(['git', 'log', '-n1', '--format=%s', sha],
                                      cwd=upstream_dir)
        comment = '[UPSTREAM] ' + commit_subject.strip()
        rel_patch_path = f'cherry/{sha}.patch'
        end_version = sha_to_revision(sha)
        result.append(PatchItem(comment, rel_patch_path, start_version,
                                end_version))
    return result
def install_wrappers(llvm_install_path):
    """Build the compiler wrapper and install it over clang/clang++/clang-tidy.

    The real binaries are renamed to *.real and the wrapper (plus
    bisect_driver.py) is installed into llvm_install_path/bin.
    """
    wrapper_path = utils.out_path('llvm_android_wrapper')
    wrapper_build_script = utils.android_path('external', 'toolchain-utils',
                                              'compiler_wrapper', 'build.py')
    # Note: The build script automatically determines the architecture
    # based on the host.
    go_env = dict(os.environ)
    go_env['PATH'] = go_bin_dir() + ':' + go_env['PATH']
    utils.check_call([sys.executable, wrapper_build_script,
                      '--config=android',
                      '--use_ccache=false',
                      '--use_llvm_next=' + str(BUILD_LLVM_NEXT).lower(),
                      '--output_file=' + wrapper_path],
                     env=go_env)

    bisect_path = utils.android_path('toolchain', 'llvm_android',
                                     'bisect_driver.py')
    bin_path = os.path.join(llvm_install_path, 'bin')
    clang_path = os.path.join(bin_path, 'clang')
    clangxx_path = os.path.join(bin_path, 'clang++')
    clang_tidy_path = os.path.join(bin_path, 'clang-tidy')

    # Rename clang and clang++ to clang.real and clang++.real.
    # clang and clang-tidy may already be moved by this script if we use a
    # prebuilt clang. So we only move them if clang.real and clang-tidy.real
    # doesn't exist.
    if not os.path.exists(clang_path + '.real'):
        shutil.move(clang_path, clang_path + '.real')
    if not os.path.exists(clang_tidy_path + '.real'):
        shutil.move(clang_tidy_path, clang_tidy_path + '.real')

    # Clear any stale entry points, then relink: clang++.real becomes a
    # relative symlink to clang.real and the wrapper is copied in as the
    # clang/clang++/clang-tidy entry points.
    utils.remove(clang_path)
    utils.remove(clangxx_path)
    utils.remove(clang_tidy_path)
    utils.remove(clangxx_path + '.real')
    os.symlink('clang.real', clangxx_path + '.real')
    shutil.copy2(wrapper_path, clang_path)
    shutil.copy2(wrapper_path, clangxx_path)
    shutil.copy2(wrapper_path, clang_tidy_path)
    install_file(bisect_path, bin_path)
def install_snapshot(branch, build, local_dir, install_dir, temp_artifact_dir):
    """Installs VNDK snapshot build artifacts to prebuilts/vndk/v{version}.

    1) Fetch build artifacts from Android Build server or from local_dir
    2) Unzip build artifacts

    Args:
        branch: string or None, branch name of build artifacts
        build: string or None, build number of build artifacts
        local_dir: string or None, local dir to pull artifacts from
        install_dir: string, directory to install VNDK snapshot
        temp_artifact_dir: string, temp directory to hold build artifacts
            fetched from Android Build server. For 'local' option, is set to
            None.
    """
    artifact_pattern = 'android-vndk-*.zip'
    if branch and build:
        artifact_dir = temp_artifact_dir
        # fetch_artifact downloads into the cwd, so move into the temp dir
        # first and return to install_dir afterwards.
        os.chdir(temp_artifact_dir)
        logger.info('Fetching {pattern} from {branch} (bid: {build})'.format(
            pattern=artifact_pattern, branch=branch, build=build))
        utils.fetch_artifact(branch, build, artifact_pattern)
        manifest_pattern = 'manifest_{}.xml'.format(build)
        logger.info('Fetching {file} from {branch} (bid: {build})'.format(
            file=manifest_pattern, branch=branch, build=build))
        utils.fetch_artifact(branch, build, manifest_pattern,
                             utils.MANIFEST_FILE_NAME)
        os.chdir(install_dir)
    elif local_dir:
        logger.info('Fetching local VNDK snapshot from {}'.format(local_dir))
        artifact_dir = local_dir

    artifacts = glob.glob(os.path.join(artifact_dir, artifact_pattern))
    for artifact in artifacts:
        logger.info('Unzipping VNDK snapshot: {}'.format(artifact))
        # -q quiet, -n never overwrite existing files.
        utils.check_call(['unzip', '-qn', artifact, '-d', install_dir],
                         logger)
def install_snapshot(branch, build, local_dir, install_dir, temp_artifact_dir):
    """Installs VNDK snapshot build artifacts to prebuilts/vndk/v{version}.

    1) Fetch build artifacts from Android Build server or from local_dir
    2) Unzip build artifacts

    Args:
        branch: string or None, branch name of build artifacts
        build: string or None, build number of build artifacts
        local_dir: string or None, local dir to pull artifacts from
        install_dir: string, directory to install VNDK snapshot
        temp_artifact_dir: string, temp directory to hold build artifacts
            fetched from Android Build server. For 'local' option, is set to
            None.
    """
    artifact_pattern = 'android-vndk-*.zip'
    if branch and build:
        artifact_dir = temp_artifact_dir
        # fetch_artifact downloads into the cwd, so move into the temp dir
        # first and return to install_dir afterwards.
        os.chdir(temp_artifact_dir)
        logging.info('Fetching {pattern} from {branch} (bid: {build})'.format(
            pattern=artifact_pattern, branch=branch, build=build))
        utils.fetch_artifact(branch, build, artifact_pattern)
        manifest_pattern = 'manifest_{}.xml'.format(build)
        logging.info('Fetching {file} from {branch} (bid: {build})'.format(
            file=manifest_pattern, branch=branch, build=build))
        utils.fetch_artifact(branch, build, manifest_pattern,
                             utils.MANIFEST_FILE_NAME)
        os.chdir(install_dir)
    elif local_dir:
        logging.info('Fetching local VNDK snapshot from {}'.format(local_dir))
        artifact_dir = local_dir

    artifacts = glob.glob(os.path.join(artifact_dir, artifact_pattern))
    for artifact in artifacts:
        logging.info('Unzipping VNDK snapshot: {}'.format(artifact))
        # -q quiet, -n never overwrite existing files.
        utils.check_call(['unzip', '-qn', artifact, '-d', install_dir])
def getNewCL(revision: str, version: str, kernel_repo_path: str):
    """Upload kernel/common CL to switch clang version."""
    logging.info(f'Uploading Kernel CL to switch to clang-{revision}')
    # Random hashtag lets us find the uploaded CL's number via a gerrit query.
    hashtag = 'chk-' + ''.join(random.sample(string.digits, 8))
    upload_cmd = [
        str(test_paths.LLVM_ANDROID_DIR / 'update_kernel_toolchain.py'),
        kernel_repo_path,
        'common',
        'NA',  # no clang_bin. We're using --clang_version instead.
        'NA',  # no bug
        f'--clang_version={revision}:{version}',
        f'--hashtag={hashtag}',
        '--no_topic',
        '--wip',
    ]
    utils.check_call(upload_cmd)
    changes = gerrit_query_change(f'hashtag:{hashtag}')
    if len(changes) != 1:
        raise RuntimeError('Upload failed; or hashtag not unique. ' +
                           f'Gerrit query returned {changes}')
    return KernelCL.getExistingCL(str(changes[0]['_number']))
def _build_config(self) -> None:
    """Configure with autotools (flags passed via @files), run make, install."""
    logger().info('Building %s for %s', self.name, self._config)
    if self.remove_install_dir and self.install_dir.exists():
        shutil.rmtree(self.install_dir)
    self.output_dir.mkdir(parents=True, exist_ok=True)
    self._touch_autoconfig_files()

    # Write flags to files, to avoid various escaping issues.
    cflags = self._config.cflags + self.cflags
    cxxflags = self._config.cxxflags + self.cxxflags
    ldflags = self._config.ldflags + self.ldflags
    cflags_file = self.output_dir / 'cflags'
    cxxflags_file = self.output_dir / 'cxxflags'
    with cflags_file.open('w') as argfile:
        argfile.write(' '.join(cflags + ldflags))
    with cxxflags_file.open('w') as argfile:
        argfile.write(' '.join(cxxflags + ldflags))

    env = self.env
    # @file: the compiler reads its remaining flags from the file just written.
    env['CC'] = f'{self._cc} @{cflags_file}'
    env['CXX'] = f'{self._cxx} @{cxxflags_file}'

    config_cmd = [
        str(self.src_dir / 'configure'), f'--prefix={self.install_dir}'
    ]
    config_cmd.extend(self.config_flags)
    # Record the exact configure invocation for reproducibility.
    utils.create_script(self.output_dir / 'config_invocation.sh', config_cmd,
                        env)
    utils.check_call(config_cmd, cwd=self.output_dir, env=env)

    make_cmd = [
        str(paths.MAKE_BIN_PATH), f'-j{multiprocessing.cpu_count()}'
    ]
    # NOTE(review): make runs without the CC/CXX env overrides — presumably
    # configure already baked them into the Makefiles; confirm.
    utils.check_call(make_cmd, cwd=self.output_dir)
    self.install_config()
def make_video(job_id, pic, mp3, out):
    """Render a still-image video: resize `pic`, mux it with `mp3` into `out`.

    Tracks progress in the Job row (STARTED -> FINISHED/FAILED).
    Returns False when the job is missing or encoding fails.
    """
    from subprocess import check_output  # local: identify's stdout is needed

    job = models.Job.query.filter_by(uniqid=job_id).first()
    if not job:
        return False
    try:
        job.state = models.JOB_STARTED
        db.session.commit()
        # Lower the picture resolution.
        check_call(["/usr/bin/convert", pic, "-resize", "640", "%s.jpg" % pic])
        pic = "%s.jpg" % pic
        # Bug fix: check_call returns the exit status (an int), not stdout,
        # so the .split() below raised AttributeError. Use check_output.
        pic_ident = check_output(["/usr/bin/identify", pic])
        try:
            hor_res = pic_ident.split(" ")[2].split("x")[1]
        except IndexError:
            check_call(["/usr/bin/convert", pic, "-resize", "640x480!", pic])
            # Bug fix: hor_res stayed unbound on this path, so int(hor_res)
            # below raised NameError. The image is now 480 pixels tall.
            hor_res = "480"
        # libx264 needs an even vertical resolution; pad by one if odd.
        if int(hor_res) % 2 != 0:
            new_res = int(hor_res) + 1
            check_call(
                ["/usr/bin/convert", pic, "-resize", "640x%s!" % new_res, pic])
        check_call([
            "/usr/bin/ffmpeg", "-loop", "1", "-i", pic, "-i", mp3,
            "-shortest", "-c:v", "libx264", "-c:a", "copy",
            "-profile:v", "baseline", "-level:v", "1.0",
            "-tune", "stillimage", out
        ])
        job.vid_size = os.path.getsize(out)
        job.state = models.JOB_FINISHED
        job.finished = datetime.datetime.utcnow()
        db.session.commit()
    except (CalledProcessError, os.error), e:
        # Encoding failed.
        job.state = models.JOB_FAILED
        db.session.commit()
        log.error("Failed processing %s: %s" % (job_id, e))
        return False
def _build_config(self) -> None:
    """Configure with CMake, build via ninja, and install."""
    logger().info('Building %s for %s', self.name, self._config)
    if self.remove_cmake_cache:
        self._rm_cmake_cache(self.output_dir)
    if self.remove_install_dir and self.install_dir.exists():
        shutil.rmtree(self.install_dir)

    # Generator, -D defines, then the source directory.
    defines = [f'-D{key}={val}' for key, val in self.cmake_defines.items()]
    cmake_cmd: List[str] = (
        [str(paths.CMAKE_BIN_PATH), '-G', 'Ninja'] + defines + [str(self.src_dir)]
    )
    self.output_dir.mkdir(parents=True, exist_ok=True)
    utils.check_call(cmake_cmd, cwd=self.output_dir, env=self.env)

    # No explicit target means ninja builds the default target.
    ninja_cmd: List[str] = [str(paths.NINJA_BIN_PATH)]
    if self.ninja_target:
        ninja_cmd.append(self.ninja_target)
    utils.check_call(ninja_cmd, cwd=self.output_dir, env=self.env)
    self.install()
def make_video(job_id, pic, mp3, out):
    """Render a still-image video: resize `pic`, mux it with `mp3` into `out`.

    Tracks progress in the Job row (STARTED -> FINISHED/FAILED).
    Returns False when the job is missing or encoding fails.
    """
    from subprocess import check_output  # local: identify's stdout is needed

    job = models.Job.query.filter_by(uniqid=job_id).first()
    if not job:
        return False
    try:
        job.state = models.JOB_STARTED
        db.session.commit()
        # Lower the picture resolution.
        check_call(["/usr/bin/convert", pic, "-resize", "640", "%s.jpg" % pic])
        pic = "%s.jpg" % pic
        # Bug fix: check_call returns the exit status (an int), not stdout,
        # so the .split() below raised AttributeError. Use check_output.
        pic_ident = check_output(["/usr/bin/identify", pic])
        try:
            hor_res = pic_ident.split(" ")[2].split("x")[1]
        except IndexError:
            check_call(["/usr/bin/convert", pic, "-resize", "640x480!", pic])
            # Bug fix: hor_res stayed unbound on this path, so int(hor_res)
            # below raised NameError. The image is now 480 pixels tall.
            hor_res = "480"
        # libx264 needs an even vertical resolution; pad by one if odd.
        if int(hor_res) % 2 != 0:
            new_res = int(hor_res) + 1
            check_call(
                ["/usr/bin/convert", pic, "-resize", "640x%s!" % new_res, pic])
        check_call([
            "/usr/bin/ffmpeg", "-loop", "1", "-i", pic, "-i", mp3,
            "-shortest", "-c:v", "libx264", "-c:a", "copy",
            "-profile:v", "baseline", "-level:v", "1.0",
            "-tune", "stillimage", out
        ])
        job.vid_size = os.path.getsize(out)
        job.state = models.JOB_FINISHED
        job.finished = datetime.datetime.utcnow()
        db.session.commit()
    except (CalledProcessError, os.error), e:
        # Encoding failed.
        job.state = models.JOB_FAILED
        db.session.commit()
        log.error("Failed processing %s: %s" % (job_id, e))
        return False
def _check_revision_exists(self, revision, git_project_path):
    """Checks whether a revision is found in a git project of current tree.

    Args:
        revision: string, revision value recorded in manifest.xml
        git_project_path: string, path relative to ANDROID_BUILD_TOP
    """
    path = utils.join_realpath(self._android_build_top, git_project_path)

    def _check_rev_list(revision):
        """Checks whether revision is reachable from HEAD of git project."""
        logging.info('Checking if revision {rev} exists in {proj}'.format(
            rev=revision, proj=git_project_path))
        try:
            # rev-list HEAD..<rev> lists commits in <rev> that HEAD lacks;
            # empty output therefore means <rev> is reachable from HEAD.
            cmd = [
                'git', '-C', path, 'rev-list', 'HEAD..{}'.format(revision)
            ]
            output = utils.check_output(cmd).strip()
        except subprocess.CalledProcessError as error:
            logging.error('Error: {}'.format(error))
            return False
        else:
            if output:
                logging.debug(
                    '{proj} does not have the following revisions: {rev}'.
                    format(proj=git_project_path, rev=output))
                return False
            else:
                logging.info(
                    'Found revision {rev} in project {proj}'.format(
                        rev=revision, proj=git_project_path))
                return True

    if not _check_rev_list(revision):
        # VNDK snapshots built from a *-release branch will have merge
        # CLs in the manifest because the *-dev branch is merged to the
        # *-release branch periodically. In order to extract the
        # revision relevant to the source of the git_project_path,
        # we fetch the *-release branch and get the revision of the
        # parent commit with FETCH_HEAD^2.
        logging.info(
            'Checking if the parent of revision {rev} exists in {proj}'.
            format(rev=revision, proj=git_project_path))
        try:
            cmd = ['git', '-C', path, 'fetch', self._remote_git, revision]
            utils.check_call(cmd)
            # FETCH_HEAD^2 is the second parent of the fetched merge commit,
            # i.e. the side merged in from the other branch.
            cmd = ['git', '-C', path, 'rev-parse', 'FETCH_HEAD^2']
            parent_revision = utils.check_output(cmd).strip()
        except subprocess.CalledProcessError as error:
            logging.error(
                'Failed to get parent of revision {rev} from "{remote}": '
                '{err}'.format(
                    rev=revision, remote=self._remote_git, err=error))
            logging.error('Try --remote to manually set remote name')
            raise
        else:
            if not _check_rev_list(parent_revision):
                return False
    return True
def start_branch(build):
    """Start a repo branch named after the build (or 'local') in the cwd project."""
    branch_name = 'update-%s' % (build or 'local')
    logging.info('Creating branch {branch} in {dir}'.format(
        branch=branch_name, dir=os.getcwd()))
    utils.check_call(['repo', 'start', branch_name, '.'])