def austerity(parameters):
    """Strip the source tree down to only the ``stage`` entry.

    Everything else under the source path is removed (with sudo), and an
    empty ``.git`` directory is re-created as a placeholder.
    """
    src_path = _get_src_path(parameters)
    with scripts.using_sudo():
        # Keep only "stage"; recursively drop every other entry.
        for entry in src_path.iterdir():
            if entry.name != 'stage':
                scripts.rm(entry, recursive=True)
        scripts.mkdir(src_path / '.git')
def build(parameters):
    """Build one pod release under the release output directory.

    The release-metadata file doubles as the "already built" marker: if it
    exists, the build is skipped.  On any error the partially-written pod
    directory is removed before the exception propagates.

    NOTE: ``name``, ``apps``, ``images``, ``mounts``, ``volumes``,
    ``systemd_unit_groups``, and ``token_names`` are free variables from
    the enclosing scope.
    """
    version = ASSERT.not_none(parameters[parameter_version])
    pod_dir_path = releases.get_output_dir_path(parameters, name, version)
    metadata_path = (
        pod_dir_path / shipyard2.POD_DIR_RELEASE_METADATA_FILENAME
    )
    if metadata_path.exists():
        LOG.info('skip: build pod: %s %s', name, version)
        return
    LOG.info('build pod: %s %s', name, version)
    try:
        scripts.mkdir(pod_dir_path)
        releases.generate_release_metadata(parameters, metadata_path)
        _generate_deploy_instruction(
            parameters=parameters,
            pod_dir_path=pod_dir_path,
            name=name,
            version=version,
            apps=apps,
            images=images,
            mounts=mounts,
            volumes=volumes,
            systemd_unit_groups=systemd_unit_groups,
            token_names=token_names,
        )
        _link_images(parameters, pod_dir_path, images)
        _link_volumes(parameters, pod_dir_path, volumes)
    except Exception:
        # Roll back on error.
        scripts.rm(pod_dir_path, recursive=True)
        raise
def build(parameters):
    """Prepare the builder pod: refresh apt index and create drydock dir.

    Must run inside the builder pod (asserted via parameters).
    """
    ASSERT.is_(parameters['inside-builder-pod'], True)
    ASSERT.all(parameters['roots'], _is_root_dir)
    with scripts.using_sudo():
        # We should run `apt-get update` even when we are not upgrading
        # the full system because some packages may be removed from the
        # distro repo while our local package index still has it.
        scripts.apt_get_update()
        scripts.mkdir(parameters['drydock'])
def config(parameters):
    """Write the library list to the config file as a JSON document."""
    config_path = _get_config_path(parameters)
    scripts.mkdir(config_path.parent)
    # Sort for a deterministic file; the list must be non-empty.
    libraries = sorted(ASSERT.not_empty(parameters['libraries']))
    config_path.write_text(json.dumps({'libraries': libraries}))
def _link_volumes(parameters, pod_dir_path, volumes):
    """Create one link per volume under the pod's volumes directory."""
    volumes_dir = shipyard2.POD_DIR_VOLUMES_DIR_NAME
    scripts.mkdir(pod_dir_path / volumes_dir)
    for entry in volumes:
        label = foreman.Label.parse(entry.label)
        _link(volumes_dir, parameters, pod_dir_path, label, entry.version)
def extract(parameters):
    """Extract the downloaded archive; no-op if already extracted.

    Asserts that extraction actually produced the expected directory.
    """
    archive = parameters[parameter_archive]
    src = _archive_get_archive_path(parameters, archive)
    dst = _archive_get_output_path(parameters, archive)
    if dst.exists():
        LOG.info('skip: extract archive: %s', src)
        return
    LOG.info('extract archive: %s', src)
    parent = dst.parent
    scripts.mkdir(parent)
    scripts.extract(src, directory=parent)
    ASSERT.predicate(dst, Path.is_dir)
def _fetch(parameters, src_path):
    """Fetch the v8 source tree and sync it to the requested branch head.

    Skipped entirely when ``src_path`` already exists.
    """
    if src_path.exists():
        LOG.info('skip: fetch v8')
        return
    LOG.info('fetch v8')
    parent = src_path.parent
    scripts.mkdir(parent)
    with scripts.using_cwd(parent):
        scripts.run(['fetch', 'v8'])
    branch = 'branch-heads/%s' % parameters['branch-head']
    with scripts.using_cwd(src_path):
        for command in (
            ['git', 'checkout', branch],
            ['git', 'pull', 'origin', branch],
            ['gclient', 'sync'],
        ):
            scripts.run(command)
def download(parameters):
    """Download the archive, then verify it (and its checksum, if any).

    Skipped when the archive file is already present.
    """
    archive = parameters[parameter_archive]
    target = _archive_get_archive_path(parameters, archive)
    if target.exists():
        LOG.info('skip: download archive: %s', archive.url)
        return
    LOG.info('download archive: %s', archive.url)
    scripts.mkdir(target.parent)
    scripts.wget(archive.url, output_path=target, headers=wget_headers)
    ASSERT.predicate(target, Path.is_file)
    if archive.checksum:
        scripts.validate_checksum(target, archive.checksum)
def _link_images(parameters, pod_dir_path, images):
    """Create links for the base image and every application image."""
    images_dir = shipyard2.POD_DIR_IMAGES_DIR_NAME
    scripts.mkdir(pod_dir_path / images_dir)
    # The base image is always linked first, then each app image.
    for label in (shipyard2.BASE_LABEL, *images):
        _link(images_dir, parameters, pod_dir_path, label, None)
def build(parameters):
    """Run the Gradle ``shadowJar`` task and copy the fat jar to packages.

    Skipped when the jar is already present in the target directory.
    NOTE: ``root_project`` is a free variable from the enclosing scope.
    """
    src_path = _find_project(parameters, foreman.get_relpath())
    root_path = _find_root_project(src_path)
    ASSERT.false(src_path.samefile(root_path))
    jar_path = src_path / ('build/libs/%s-all.jar' % src_path.name)
    # Gradle task path mirrors the project's path relative to the root.
    subproject = ':'.join(src_path.relative_to(root_path).parts)
    task = ':%s:shadowJar' % subproject
    target_dir_path = parameters[root_project + ':packages']
    if (target_dir_path / jar_path.name).exists():
        LOG.info('skip: run task %s', task)
        return
    LOG.info('run task %s', task)
    with scripts.using_cwd(root_path):
        scripts.run(['./gradlew', task])
    with scripts.using_sudo():
        scripts.mkdir(target_dir_path)
        scripts.cp(jar_path, target_dir_path)
def base_build(parameters):
    """Build the base and builder-base images, then import them into ctr.

    Skipped when both image files already exist; asserts that otherwise
    neither exists (all-or-nothing — no partial builds allowed).
    """
    version = ASSERT.not_none(parameters['%s/version' % shipyard2.BASE])
    image_paths = [
        utils.get_image_path(parameters, shipyard2.BASE),
        utils.get_builder_image_path(parameters, shipyard2.BASE),
    ]
    if all(map(Path.is_file, image_paths)):
        LOG.info('skip: build base: %s %s', version, image_paths)
        return
    # Either both images exist (handled above) or neither may exist.
    ASSERT.not_any(image_paths, Path.is_file)
    LOG.info('build base: %s %s', version, image_paths)
    for path in image_paths:
        scripts.mkdir(path.parent)
    with contextlib.ExitStack() as stack:
        _build_base(stack, version, image_paths[0], image_paths[1])
    for path in image_paths:
        utils.chown(path)
    with scripts.using_sudo():
        for path in image_paths:
            ctr_scripts.ctr_import_image(path)
def build(parameters):
    """Configure, build, and install nng via CMake/Ninja.

    Skipped when the build directory already exists.
    """
    src_path = parameters['//bases:drydock'] / foreman.get_relpath()
    src_path /= src_path.name
    build_dir_path = src_path / 'build'
    if build_dir_path.exists():
        LOG.info('skip: build nng')
        return
    LOG.info('build nng')
    scripts.mkdir(build_dir_path)
    with scripts.using_cwd(build_dir_path):
        scripts.run([
            'cmake',
            '-D',
            'BUILD_SHARED_LIBS:BOOL=ON',
            '-G',
            'Ninja',
            '..',
        ])
        scripts.run(['ninja'])
        # Skip `ninja test` for now.
        with scripts.using_sudo():
            scripts.run(['ninja', 'install'])
            scripts.run(['ldconfig'])
def web_server_setup(parameters):
    """Install the static index page under the web server root."""
    del parameters  # Unused.
    web_root = '/srv/web'
    index_page = foreman.to_path('web-server/index.html')
    with scripts.using_sudo():
        scripts.mkdir(web_root)
        scripts.cp(index_page, web_root)
def init(repo_path):
    """Create the release-envs directory under the release repo."""
    envs_dir_path = repo_path / shipyard2.RELEASE_ENVS_DIR_NAME
    scripts.mkdir(envs_dir_path)
def init(cls, repo_path):
    """Create this class's top-level directory under the repo."""
    top_dir_path = repo_path / cls._TOP_DIR_NAME
    scripts.mkdir(top_dir_path)
def database_setup(parameters):
    """Create the ops database directory (requires sudo)."""
    del parameters  # Unused.
    with scripts.using_sudo():
        scripts.mkdir(OPS_DB_PATH)
def haproxy_setup(parameters):
    """Create the HAProxy directory (requires sudo)."""
    del parameters  # Unused.
    with scripts.using_sudo():
        scripts.mkdir(HAPROXY_PATH)