def cmd_build(args):
    """Run a foreman build for ``args.rule`` at ``args.version``.

    Optionally releases the built artifact afterwards when
    ``args.also_release`` is set: pod and xar rules are released via the
    envs dir; image rules are built only (asserted, then skipped).

    Returns 0 on success (shell-style exit code).
    """
    LOG.info('build: %s %s', args.rule, args.version)
    scripts.run([
        REPO_ROOT_PATH / 'shipyard2' / 'scripts' / 'foreman.sh',
        'build',
        # Propagate our debug flag into the child foreman process.
        *(('--debug', ) if shipyard2.is_debug() else ()),
        # Extra command-line arguments loaded from an optional file.
        *_read_args_file(args.args_file or ()),
        *('--parameter', '//bases:inside-builder-pod=false'),
        # Pin the rule's "version" parameter to the requested version,
        # e.g. //path:name/version=<version>.
        *(
            '--parameter',
            '//%s:%s=%s' % (
                args.rule.path,
                args.rule.name.with_name('version'),
                args.version,
            ),
        ),
        args.rule,
    ])
    if args.also_release:
        if _look_like_pod_rule(args.rule):
            release = _get_envs_dir(args).release_pod
        elif _look_like_xar_rule(args.rule):
            release = _get_envs_dir(args).release_xar
        else:
            # Image rules are valid here but have no release step.
            ASSERT.predicate(args.rule, _look_like_image_rule)
            release = None
        if release:
            label = _guess_label_from_rule(args.rule)
            LOG.info('release: %s %s to %s', label, args.version, args.env)
            release(args.env, label, args.version)
    return 0
def _mount_overlay(pod_dir_path, config):
    """Mount the pod's rootfs as an overlay of its images' rootfs dirs.

    The image rootfs directories become the overlay's lower layers; the
    pod's upper/work directories receive all writes.
    """
    rootfs_path = _get_rootfs_path(pod_dir_path)
    LOG.info('mount overlay: %s', rootfs_path)
    #
    # Since we should have added image refs, it is safe to access image
    # directories without locking them.
    #
    # NOTE: You cannot use _iter_ref_image_ids here as its result is not
    # ordered; you must use _iter_image_ids.
    #
    image_ids = list(_iter_image_ids(config))
    # Call reverse() because in overlay file system, lower directories
    # are ordered from high to low.
    image_ids.reverse()
    scripts.run([
        'mount',
        *('-t', 'overlay'),
        *(
            '-o',
            'lowerdir=%s,upperdir=%s,workdir=%s' % (
                # Colon-separated list, as mount(8) expects.
                ':'.join(
                    str(_get_image_rootfs_path(image_id))
                    for image_id in image_ids
                ),
                _get_upper_path(pod_dir_path),
                _get_work_path(pod_dir_path),
            ),
        ),
        'overlay',
        rootfs_path,
    ])
def build(parameters):
    """pip-install the pinned version of this third-party package."""
    LOG.info('install package %s version %s', package, version)
    with scripts.using_sudo():
        requirement = '%s==%s' % (package, version)
        scripts.run([
            parameters['//third-party/cpython:pip'],
            'install',
            requirement,
        ])
def _install():
    """Install the staged boost build system-wide and refresh ldconfig."""
    already_installed = Path('/usr/local/include/boost').exists()
    if already_installed:
        LOG.info('skip: install boost')
        return
    LOG.info('install boost')
    with scripts.using_sudo():
        for command in (['./b2', 'install'], ['ldconfig']):
            scripts.run(command)
def rsync(src_path, dst_path, rsync_args=()):
    """Copy the contents of src_path into dst_path via rsync --archive."""
    # The trailing slash makes rsync copy the directory's contents
    # rather than the directory itself.
    src_with_slash = '%s/' % src_path
    command = ['rsync', '--archive']
    command.extend(rsync_args)
    command.append(src_with_slash)
    command.append(dst_path)
    scripts.run(command)
def build(parameters):
    """Install dependencies and build a first-party npm package."""
    package_path = find_package(
        parameters,
        foreman.get_relpath(),
        sub_directory_path,
    )
    LOG.info('build first-party package: %s', package_path)
    with scripts.using_cwd(package_path):
        scripts.run(['npm', 'install'])
        scripts.run(['npm', 'run', 'build'])
def _fixup():
    """Patch v8's vendored jinja2 for the Python 3.10 collections.abc move."""
    # TODO: Remove this after upstream fixes it.
    sed_command = [
        'sed',
        '--in-place',
        '--regexp-extended',
        r's/(from\s+collections)\s+(import\s+Mapping)/\1.abc \2/',
        'third_party/jinja2/tests.py',
    ]
    scripts.run(sed_command)
def _configure(parameters, src_path):
    """Run cpython's ./configure unless a Makefile already exists."""
    if (src_path / 'Makefile').exists():
        LOG.info('skip: configure cpython build')
        return
    LOG.info('configure cpython build')
    command = ['./configure', '--prefix', parameters['prefix']]
    command.extend(parameters['configuration'])
    if parameters['shared']:
        command.append('--enable-shared')
    scripts.run(command)
def setup(parameters):
    """Generate the gradle wrapper in the root project if it is absent."""
    root_path = ASSERT.predicate(
        _find_project(parameters, foreman.get_relpath()),
        _is_root_project,
    )
    if (root_path / 'gradlew').exists():
        LOG.info('skip: generate gradle wrapper')
    else:
        LOG.info('generate gradle wrapper')
        with scripts.using_cwd(root_path):
            scripts.run(['gradle', 'wrapper'])
def _install(parameters, src_path):
    """Install cpython via `make install`; run ldconfig for shared builds."""
    del src_path  # Unused.
    if parameters['python'].exists():
        LOG.info('skip: install cpython')
        return
    LOG.info('install cpython')
    with scripts.using_sudo():
        # (Probably a bug?) When optimizations are enabled, this
        # re-runs `make run_profile_task`.
        scripts.make(['install'])
        if parameters['shared']:
            scripts.run(['ldconfig'])
def rsync_copy(src_path, dst_path, rsync_args=()):
    """Copy src_path's contents to dst_path, preserving owner/group.

    We do NOT use ``shutil.copytree`` because shutil's file copy
    functions in general do not preserve the file owner/group.
    """
    LOG.info('copy: %s -> %s', src_path, dst_path)
    command = ['rsync', '--archive']
    command.extend(rsync_args)
    # Trailing slash is an rsync trick: copy contents, not the dir.
    command.append('%s/' % src_path)
    command.append(dst_path)
    scripts.run(command)
def _umount(path):
    """Unmount ``path``, tolerating whitelisted umount errors.

    Raises subprocess.CalledProcessError for any umount failure whose
    stderr does not match _UMOUNT_ERROR_WHITELIST.
    """
    ASSERT.not_predicate(path, Path.is_symlink)
    LOG.info('umount: %s', path)
    try:
        with scripts.doing_capture_stderr():
            scripts.run(['umount', path])
    except subprocess.CalledProcessError as exc:
        # BUG FIX: Compiled patterns' ``search`` does not take flags;
        # the previous ``search(exc.stderr, re.MULTILINE)`` passed the
        # flag value (8) as the ``pos`` argument, silently skipping the
        # first 8 characters of stderr.  If multiline matching is
        # needed, re.MULTILINE belongs in the re.compile() call that
        # builds _UMOUNT_ERROR_WHITELIST.
        if _UMOUNT_ERROR_WHITELIST.search(exc.stderr):
            # Expected/benign failure (e.g. already unmounted).
            LOG.debug('umount err: %s, %s', path, exc.stderr, exc_info=True)
        else:
            LOG.error('umount err: %s, %s', path, exc.stderr)
            raise
def cmd_prepare_base_rootfs(image_rootfs_path):
    """Bootstrap a minimal Ubuntu base rootfs at the given path.

    Requires root privilege; the target path must not exist yet.
    """
    ASSERT.not_predicate(image_rootfs_path, Path.exists)
    oses.assert_root_privilege()
    # Extra packages: dbus for convenience, sudo for changing service
    # user/group, tzdata for /etc/localtime.
    command = [
        'debootstrap',
        '--variant=minbase',
        '--components=main',
        '--include=dbus,sudo,systemd,tzdata',
        models.BASE_IMAGE_RELEASE_CODE_NAME,
        image_rootfs_path,
        'http://us.archive.ubuntu.com/ubuntu/',
    ]
    scripts.run(command)
def _git_get_dirty():
    """Return True if `git status --porcelain` shows relevant changes.

    Entries whose first status column is blank (i.e. unstaged-only
    modifications) are ignored, matching the original behavior.
    """
    proc = scripts.run(['git', 'status', '--porcelain'])
    status_lines = proc.stdout.decode('utf-8').split('\n')
    # Be careful of empty lines (the output ends with a newline)!
    return any(
        line and not line.startswith(' ') for line in status_lines
    )
def _build(parameters, src_path, config_data):
    """Bootstrap and stage the configured boost libraries."""
    libraries = ASSERT.getitem(config_data, 'libraries')
    if (src_path / 'stage').exists():
        LOG.info('skip: build boost: %s', libraries)
        return
    LOG.info('build boost: %s', libraries)
    bootstrap = [
        './bootstrap.sh',
        '--with-libraries=%s' % ','.join(libraries),
    ]
    if 'python' in libraries:
        # Point boost.python at our own cpython build.
        bootstrap.append(
            '--with-python=%s' % parameters['//third-party/cpython:python']
        )
    bootstrap.extend(['variant=release', 'link=shared', 'threading=multi'])
    scripts.run(bootstrap)
    scripts.run(['./b2', 'stage'])
def _git_get_url(source):
    """Return the URL of the ``origin`` remote.

    Fails via ASSERT.unreachable when no origin remote is configured.
    """
    proc = scripts.run(['git', 'remote', '--verbose'])
    for remote in proc.stdout.decode('utf-8').split('\n'):
        parts = remote.split()
        # BUG FIX: Guard against empty/short lines - the output ends
        # with a newline, so the last split element is '' and the
        # original unconditional parts[0] raised IndexError before the
        # assertion below could report the real problem.
        if parts and parts[0] == 'origin':
            return parts[1]
    return ASSERT.unreachable('expect remote origin: {}', source)
def _build(src_path):
    """Generate build files and build the v8 monolith static library."""
    monolith_path = src_path / 'out.gn/x64.release/obj/libv8_monolith.a'
    if monolith_path.exists():
        LOG.info('skip: build v8')
        return
    LOG.info('build v8')
    with scripts.using_cwd(src_path):
        _fixup()
        scripts.run([
            './tools/dev/v8gen.py',
            'gen',
            # x64.release.sample sets v8_monolithic=true.
            '-b',
            'x64.release.sample',
            # Remove ".sample" from output directory.
            'x64.release',
        ])
        scripts.run(['ninja', '-C', 'out.gn/x64.release', 'v8_monolith'])
def build(parameters):
    """Run the shadowJar task for this sub-project and copy out the jar."""
    src_path = _find_project(parameters, foreman.get_relpath())
    root_path = _find_root_project(src_path)
    ASSERT.false(src_path.samefile(root_path))
    output_path = src_path / ('build/libs/%s-all.jar' % src_path.name)
    # Gradle task path, e.g. ":sub:project:shadowJar".
    task = ':%s:shadowJar' % ':'.join(
        src_path.relative_to(root_path).parts
    )
    target_dir_path = parameters[root_project + ':packages']
    if (target_dir_path / output_path.name).exists():
        LOG.info('skip: run task %s', task)
        return
    LOG.info('run task %s', task)
    with scripts.using_cwd(root_path):
        scripts.run(['./gradlew', task])
    with scripts.using_sudo():
        scripts.mkdir(target_dir_path)
        scripts.cp(output_path, target_dir_path)
def ctr(args):
    """Invoke ctr, adding --verbose (cached) when debug logging is on."""
    global _VERBOSE
    if _VERBOSE is None:
        debug_enabled = logging.getLogger().isEnabledFor(logging.DEBUG)
        _VERBOSE = ('--verbose', ) if debug_enabled else ()
    return scripts.run(['ctr', *_VERBOSE, *args])
def build_image(metadata, make_rootfs, output_path):
    """Assemble an image tarball (metadata + rootfs) at output_path.

    The image is staged in a sibling temporary directory so a failed
    build leaves no partial staging directory behind.
    """
    ASSERT.not_predicate(output_path, g1.files.lexists)
    with tempfile.TemporaryDirectory(
        dir=output_path.parent,
        prefix=output_path.name + '-',
    ) as staging_dir:
        staging_dir_path = Path(staging_dir)
        _write_metadata(metadata, staging_dir_path)
        make_rootfs(get_rootfs_path(staging_dir_path))
        _setup_image_dir(staging_dir_path)
        scripts.run([
            'tar',
            '--create',
            '--file',
            output_path,
            '--gzip',
            '--directory',
            staging_dir_path,
            _METADATA,
            _ROOTFS,
        ])
def _get_var_path(name):
    """Look up a capnp pkg-config variable as an existing directory."""
    with scripts.doing_capture_stdout():
        proc = scripts.run([
            'pkg-config',
            '--variable=%s' % name,
            'capnp',
        ])
    var_path = Path(proc.stdout.decode('utf-8').strip())
    return ASSERT.predicate(var_path, Path.is_dir)
def _build(parameters, make_global_options):
    """pip-install the current directory under sudo, keeping PYTHONPATH.

    `sudo --preserve-env` does not preserve PYTHONPATH (in case you are
    curious, you may run `sudo sudo -V` to get the list of preserved
    variables), hence the explicit preserving_sudo_envs below.
    """
    with scripts.using_sudo(), scripts.preserving_sudo_envs(['PYTHONPATH']):
        command = [
            parameters['//third-party/cpython:pip'],
            'install',
            # Use `--no-deps` (`python3 setup.py install` does not
            # support this, by the way) so that we won't implicitly
            # install dependencies (you must explicitly specify them).
            '--no-deps',
            # Because we add a few Python package to PYTHONPATH, such
            # as g1.bases, we need to force their installation
            # (otherwise pip would consider them already installed).
            '--upgrade',
            '--force-reinstall',
        ]
        command.extend(
            _build_get_global_options(parameters, make_global_options)
        )
        command.append('.')
        scripts.run(command)
def _fetch(parameters, src_path):
    """Fetch v8 sources and sync them to the configured branch head."""
    if src_path.exists():
        LOG.info('skip: fetch v8')
        return
    LOG.info('fetch v8')
    scripts.mkdir(src_path.parent)
    with scripts.using_cwd(src_path.parent):
        scripts.run(['fetch', 'v8'])
    branch = 'branch-heads/%s' % parameters['branch-head']
    with scripts.using_cwd(src_path):
        for command in (
            ['git', 'checkout', branch],
            ['git', 'pull', 'origin', branch],
            ['gclient', 'sync'],
        ):
            scripts.run(command)
def build(parameters):
    """Configure (cmake + Ninja), build, and install nng shared libs."""
    src_path = parameters['//bases:drydock'] / foreman.get_relpath()
    src_path /= src_path.name
    build_dir_path = src_path / 'build'
    if build_dir_path.exists():
        LOG.info('skip: build nng')
        return
    LOG.info('build nng')
    scripts.mkdir(build_dir_path)
    with scripts.using_cwd(build_dir_path):
        cmake_command = [
            'cmake',
            '-D',
            'BUILD_SHARED_LIBS:BOOL=ON',
            '-G',
            'Ninja',
            '..',
        ]
        scripts.run(cmake_command)
        scripts.run(['ninja'])
        # Skip `ninja test` for now.
        with scripts.using_sudo():
            scripts.run(['ninja', 'install'])
            scripts.run(['ldconfig'])
def build(parameters):
    """Autotools-build and install capnproto's C++ library."""
    src_path = parameters['//bases:drydock'] / foreman.get_relpath()
    src_path /= src_path.name
    built_library = src_path / 'c++/.libs/libcapnp.so'
    if built_library.exists():
        LOG.info('skip: build capnproto')
        return
    LOG.info('build capnproto')
    with scripts.using_cwd(src_path / 'c++'):
        scripts.run(['autoreconf', '-i'])
        scripts.run(['./configure'])
        # Skip `make check` for now.
        scripts.make()
        with scripts.using_sudo():
            scripts.make(['install'])
            scripts.run(['ldconfig'])
def _git_get_revision():
    """Return the full commit hash of HEAD."""
    output = scripts.run(['git', 'log', '-1', '--format=format:%H']).stdout
    return output.decode('ascii').strip()
def build(parameters):
    """Put depot_tools on PATH and let gclient update itself."""
    src_path = parameters['//bases:drydock'] / foreman.get_relpath()
    src_path /= src_path.name
    scripts.export_path('PATH', src_path)
    with scripts.using_cwd(src_path):
        # Running bare `gclient` updates depot_tools.
        scripts.run(['gclient'])