def define_package(
    *,
    name_prefix='',
    deps=(),
    sub_directory_path=None,
):
    """Define a first-party package.

    This defines:
    * Rule: [name_prefix/]build.
    """
    rule_build = rules.canonicalize_name_prefix(name_prefix) + 'build'

    @foreman.rule(rule_build)
    @foreman.rule.depend('//bases:build')
    @foreman.rule.depend('//third-party/nodejs:build')
    def build(parameters):
        # Locate the package source tree under this rule's relpath.
        package_path = find_package(
            parameters,
            foreman.get_relpath(),
            sub_directory_path,
        )
        LOG.info('build first-party package: %s', package_path)
        with scripts.using_cwd(package_path):
            scripts.run(['npm', 'install'])
            scripts.run(['npm', 'run', 'build'])

    # Attach caller-supplied dependencies to the build rule, in the
    # order the caller provided them.
    for dependency in deps:
        build = build.depend(dependency)

    return PackageRules(build=build)
def define_pypi_package(
    package,
    version,
    *,
    name_prefix='',
    extras=(),
):
    """Define a PyPI-hosted package.

    This defines:
    * Rule: [name_prefix/]build.
    * Rule: [name_prefix/]build/<extra> for each extra.

    NOTE: Unlike first-party packages, the extras argument here is just
    a list of extra names, rather than pairs of extra name and extra's
    dependencies.
    """
    rule_build = rules.canonicalize_name_prefix(name_prefix) + 'build'
    # Define the main build rule first, then one rule per extra
    # (e.g. rule "build/foo" builds "package[foo]").
    build = _pypi_make_build_rule(rule_build, package, version)
    build_extras = {}
    for extra in extras:
        build_extras[extra] = _pypi_make_build_rule(
            '%s/%s' % (rule_build, extra),
            '%s[%s]' % (package, extra),
            version,
        )
    return PackageRules(build=build, build_extras=build_extras)
def define_git_repo(
    repo_url,
    treeish,
    *,
    name_prefix='',
):
    """Define a git repo.

    Given a rule "//rule/path/foo", this checks out the repo and its
    sub modules into "drydock/rule/path/foo/foo".  Note the extra "foo"
    of the repo path - since the repo is checked into a sub directory,
    you may use the parent directory as a scratch pad.

    This defines:
    * Rule: [name_prefix/]git-clone.
    """
    rule_git_clone = \
        rules.canonicalize_name_prefix(name_prefix) + 'git-clone'

    @foreman.rule(rule_git_clone)
    @foreman.rule.depend('//bases:build')
    @foreman.rule.depend('//bases:git-repo/install')
    def git_clone(parameters):
        # drydock/<relpath>/<name>: the repo goes into an extra
        # sub directory named after the relpath's last component so that
        # the parent directory stays free as a scratch pad.
        base_path = parameters['//bases:drydock'] / foreman.get_relpath()
        repo_path = base_path / base_path.name
        git_dir_path = repo_path / '.git'
        # A present .git directory means the clone was done previously.
        if git_dir_path.is_dir():
            LOG.info('skip: git clone: %s', repo_url)
            return
        LOG.info('git clone: %s', repo_url)
        scripts.git_clone(repo_url, repo_path=repo_path, treeish=treeish)
        ASSERT.predicate(git_dir_path, Path.is_dir)

    return GitRepoRules(git_clone=git_clone)
def define_distro_packages(
    packages,
    *,
    name_prefix='',
):
    """Define distro packages.

    This defines:
    * Parameter: [name_prefix/]packages.
    * Rule: [name_prefix/]install.
    """
    name_prefix = rules.canonicalize_name_prefix(name_prefix)
    parameter_packages = name_prefix + 'packages'
    rule_install = name_prefix + 'install'

    (foreman.define_parameter.list_typed(parameter_packages)
     .with_default(packages))

    @foreman.rule(rule_install)
    @foreman.rule.depend('//bases:build')
    def install(parameters):
        # Package installation requires root privileges.
        with scripts.using_sudo():
            scripts.apt_get_install(parameters[parameter_packages])

    return DistroPackagesRules(install=install)
def define_application(root_project, *, name_prefix=''):
    """Define a first-party fat-JAR application.

    This defines:
    * Rule: [name_prefix/]build.
    """
    # NOTE: Docstring fixed from "[name_prefix]/build" to
    # "[name_prefix/]build" - the slash is part of the optional prefix,
    # matching every other define_* helper in this file.
    name_prefix = rules.canonicalize_name_prefix(name_prefix)
    rule_build = name_prefix + 'build'

    @foreman.rule(rule_build)
    @foreman.rule.depend('//bases:build')
    @foreman.rule.depend(root_project + ':setup')
    def build(parameters):
        src_path = _find_project(parameters, foreman.get_relpath())
        root_path = _find_root_project(src_path)
        # The application must be a sub-project of the root project,
        # not the root project itself.
        ASSERT.false(src_path.samefile(root_path))
        # The shadowJar task writes the fat JAR to build/libs/<name>-all.jar.
        output_path = src_path / ('build/libs/%s-all.jar' % src_path.name)
        # Derive the Gradle task path (":sub:project:shadowJar") from the
        # project directory's path relative to the root project.
        task = ':'.join(src_path.relative_to(root_path).parts)
        task = ':%s:shadowJar' % task
        target_dir_path = parameters[root_project + ':packages']
        # Skip when the JAR was already copied to the target directory.
        if (target_dir_path / output_path.name).exists():
            LOG.info('skip: run task %s', task)
            return
        LOG.info('run task %s', task)
        with scripts.using_cwd(root_path):
            scripts.run(['./gradlew', task])
        # Target directory creation/copy requires root privileges.
        with scripts.using_sudo():
            scripts.mkdir(target_dir_path)
            scripts.cp(output_path, target_dir_path)

    return ApplicationRules(build=build)
def define_build_time_package(
    *,
    name_prefix='',
):
    """Define a first-party build-time package.

    This defines:
    * Rule: [name_prefix/]build.
    """
    rule_build = rules.canonicalize_name_prefix(name_prefix) + 'build'

    @foreman.rule(rule_build)
    @foreman.rule.depend('//bases:build')
    @foreman.rule.depend('//third-party/cpython:build')
    def build(parameters):
        package_path = find_package(parameters, foreman.get_relpath())
        LOG.info('export first-party host package: %s', package_path)
        # Build-time packages are not installed; they are made available
        # by exporting the source path onto PYTHONPATH.
        scripts.export_path('PYTHONPATH', package_path)

    return PackageRules(build=build, build_extras={})
def define_package(
    *,
    name_prefix='',
    build_time_deps=(),
    deps=(),
    extras=(),
    make_global_options=None,
):
    """Define a first-party package.

    This defines:
    * Rule: [name_prefix/]build.
    * Rule: [name_prefix/]build/<extra> for each extra.
    """
    name_prefix = rules.canonicalize_name_prefix(name_prefix)
    rule_build = name_prefix + 'build'

    @foreman.rule(rule_build)
    @foreman.rule.depend('//bases:build')
    @foreman.rule.depend('//third-party/cpython:build')
    def build(parameters):
        package_path = find_package(parameters, foreman.get_relpath())
        LOG.info('build first-party package: %s', package_path)
        with scripts.using_cwd(package_path):
            _build(parameters, make_global_options)

    # Attach build-time dependencies first, then runtime dependencies,
    # each in the order the caller supplied them.
    for dependency in (*build_time_deps, *deps):
        build = build.depend(dependency)

    return PackageRules(
        build=build,
        # extras is a sequence of (extra_name, extra_deps) pairs.
        build_extras={
            extra: _make_build_extra(extra, rule_build, extra_deps)
            for extra, extra_deps in extras
        },
    )
def define_archive(
    url,
    *,
    name_prefix='',
    filename=None,
    output=None,
    checksum=None,
    wget_headers=(),
):
    """Define an archive.

    This defines:
    * Parameter: [name_prefix/]archive.
    * Rule: [name_prefix/]download.
    * Rule: [name_prefix/]extract.
    """
    name_prefix = rules.canonicalize_name_prefix(name_prefix)
    parameter_archive = name_prefix + 'archive'
    rule_download = name_prefix + 'download'
    rule_extract = name_prefix + 'extract'

    (foreman.define_parameter.namedtuple_typed(Archive, parameter_archive)
     .with_doc('archive info')
     .with_default(_archive_make(url, filename, output, checksum)))

    @foreman.rule(rule_download)
    @foreman.rule.depend('//bases:archive/install')
    @foreman.rule.depend('//bases:build')
    def download(parameters):
        archive_info = parameters[parameter_archive]
        archive_path = _archive_get_archive_path(parameters, archive_info)
        # An existing file means the download was done previously.
        if archive_path.exists():
            LOG.info('skip: download archive: %s', archive_info.url)
            return
        LOG.info('download archive: %s', archive_info.url)
        scripts.mkdir(archive_path.parent)
        scripts.wget(
            archive_info.url,
            output_path=archive_path,
            headers=wget_headers,
        )
        ASSERT.predicate(archive_path, Path.is_file)
        # Verify only when a checksum was provided.
        if archive_info.checksum:
            scripts.validate_checksum(archive_path, archive_info.checksum)

    @foreman.rule(rule_extract)
    @foreman.rule.depend('//bases:archive/install')
    @foreman.rule.depend('//bases:build')
    @foreman.rule.depend(rule_download)
    def extract(parameters):
        archive_info = parameters[parameter_archive]
        archive_path = _archive_get_archive_path(parameters, archive_info)
        output_path = _archive_get_output_path(parameters, archive_info)
        # An existing output directory means the extraction was done
        # previously.
        if output_path.exists():
            LOG.info('skip: extract archive: %s', archive_path)
            return
        LOG.info('extract archive: %s', archive_path)
        scripts.mkdir(output_path.parent)
        # Extract into the parent; the archive is expected to produce
        # the output directory itself (asserted below).
        scripts.extract(archive_path, directory=output_path.parent)
        ASSERT.predicate(output_path, Path.is_dir)

    return ArchiveRules(download=download, extract=extract)