def BaseConfig(CHECKOUT_PATH, **dummy_kwargs):
  """Schema for this module's configuration.

  Args:
    CHECKOUT_PATH: path of the source checkout; frozen into the config.
  """
  return ConfigGroup(
      CHECKOUT_PATH=Static(CHECKOUT_PATH),
      # True for official (branded) builds.
      official_build=Single(bool, empty_val=False, required=False),
      # Optional paths consumed by later steps.
      unittests_gypi=Single(Path, required=False),
      version_file=Single(Path, required=False),
  )
def BaseConfig(PERF_ID=None, PERF_CONFIG=None, TEST_SUITE=None, **_kwargs):
  """Schema for the perf-test configuration.

  The three keyword arguments are frozen into the config as Statics.
  """
  return ConfigGroup(
      PERF_ID=Static(PERF_ID),
      PERF_CONFIG=Static(PERF_CONFIG),
      TEST_SUITE=Static(TEST_SUITE),
      # Run tests out of isolates / on swarming.
      use_isolate=Single(bool, False),
      enable_swarming=Single(bool, False),
  )
def BaseConfig(**_kwargs):
  """Schema exercising each config item type (used by tests)."""
  return ConfigGroup(
      # Various config options to be exercised in tests.
      thedict=Dict(value_type=tuple),
      thelist=List(basestring),
      thestring=Single(basestring, required=True),
      thebool=Single(bool, required=False, empty_val=False),
      thesubconfig=ConfigGroup(
          thefloat=Single(float),
          thestaticint=Static(42, hidden=False),
      ),
  )
def BaseConfig(**_kwargs):
  """Schema for the testing configuration.

  Optional kwargs SHARD_COUNT / SHARD_RUN select one shard out of a total
  (both 1-based; the run index must not exceed the count).
  """
  count = _kwargs.get('SHARD_COUNT', 1)
  run = _kwargs.get('SHARD_RUN', 1)
  assert count >= 1
  assert run >= 1
  assert run <= count
  return ConfigGroup(
      gyp_env=ConfigGroup(
          AR=Single(basestring, required=False),
          CC=Single(basestring, required=False),
          CXX=Single(basestring, required=False),
          CXX_host=Single(basestring, required=False),
          LINK=Single(basestring, required=False),
          RANLIB=Single(basestring, required=False),
      ),
      mips_cross_compile=Single(bool, empty_val=False, required=False),
      # Test configuration that is the equal for all tests of a builder. It
      # might be refined later in the test runner for distinct tests.
      testing=ConfigGroup(
          test_args=List(basestring),
          may_shard=Single(bool, empty_val=True, required=False),
          SHARD_COUNT=Static(count),
          SHARD_RUN=Static(run),
      ),
  )
def BaseConfig(USE_MIRROR=False):
  """Schema for the Android (AOSP) checkout configuration."""
  chromium_subpath = ('external', 'chromium_org')
  src_path = Path('[SLAVE_BUILD]', 'android-src')
  return ConfigGroup(
      lunch_flavor=Single(basestring),
      repo=ConfigGroup(
          url=Single(basestring),
          branch=Single(basestring),
          sync_flags=List(basestring),
      ),
      USE_MIRROR=Static(bool(USE_MIRROR)),
      # If present causes the sync step to use the specified manifest instead
      # of the one associated with the repo.branch.
      sync_manifest_override=Single(Path, required=False),
      # Path stuff.
      chromium_in_android_subpath=Static('/'.join(chromium_subpath)),
      build_path=Static(src_path),
      slave_chromium_in_android_path=Static(
          src_path.join(*chromium_subpath)),
      slave_android_out_path=Static(src_path.join('out')),
  )
'bad_revision': Property( kind=str, help='The first known good revision.'), 'tests': Property( kind=Dict(value_type=list), default={}, help='The failed tests, the test name should be full name, e.g.: {' ' "browser_tests": [' ' "suite.test1", "suite.test2"' ' ]' '}'), 'buildbucket': Property( default=None, help='The buildbucket property in which we can find build id.' 'We need to use build id to get tests.'), 'use_analyze': Property( kind=Single(bool, empty_val=False, required=False), default=True, help='Use analyze to skip commits that do not affect tests.'), 'suspected_revisions': Property( kind=List(basestring), default=[], help='A list of suspected revisions from heuristic analysis.'), } class TestResult(object): SKIPPED = 'skipped' # A commit doesn't impact the test. PASSED = 'passed' # The compile or test passed. FAILED = 'failed' # The compile or test failed. INFRA_FAILED = 'infra_failed' # Infra failed. def _compile_and_test_at_revision(api, target_mastername, target_buildername,
def BaseConfig(PLATFORM='default'):
  """Schema for the buildbucket module configuration."""
  return ConfigGroup(
      # Hostname of the buildbucket service.
      buildbucket_host=Single(str, required=True),
      # Path to the buildbucket client binary.
      buildbucket_client_path=Single(str, required=True),
      PLATFORM=Static(str(PLATFORM)),
  )
# Copyright 2017 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

DEPS = [
    'properties',
]

from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single

PROPERTIES = {
    '$recipe_engine/runtime': Property(
        help='Properties specifically for the runtime module',
        param_name='properties',
        kind=ConfigGroup(
            # Whether build is running on LUCI stack.
            is_luci=Single(bool),
            # Whether build is running in experimental mode.
            is_experimental=Single(bool),
        ),
        default={},
    ),
}
def BaseConfig(CBB_CONFIG=None, CBB_BRANCH=None, CBB_BUILD_NUMBER=None,
               CBB_DEBUG=False, CBB_CLOBBER=False, CBB_BUILDBUCKET_ID=None,
               CBB_MASTER_BUILD_ID=None, CBB_EXTRA_ARGS=None, **_kwargs):
  """Schema for the Chromite (cbuildbot) module configuration.

  The CBB_* keyword arguments seed the corresponding field defaults below;
  CBB_EXTRA_ARGS, if truthy, is applied to the built group before returning.
  """
  cgrp = ConfigGroup(
      # Base mapping of repository key to repository name.
      repositories=Dict(value_type=Set(basestring)),
      # Checkout Chromite at this branch. "origin/" will be prepended.
      chromite_branch=Single(basestring, empty_val=CBB_BRANCH or 'master'),
      # Should the Chrome version be supplied to cbuildbot?
      use_chrome_version=Single(bool),
      # Should the CrOS manifest commit message be parsed and added to
      # 'cbuildbot' flags?
      read_cros_manifest=Single(bool),
      # cbuildbot tool flags.
      cbb=ConfigGroup(
          # The Chromite configuration to use.
          config=Single(basestring, empty_val=CBB_CONFIG),
          # If supplied, forward to cbuildbot as '--master-build-id'.
          build_id=Single(basestring, empty_val=CBB_MASTER_BUILD_ID),
          # If supplied, forward to cbuildbot as '--buildnumber'.
          build_number=Single(int, empty_val=CBB_BUILD_NUMBER),
          # If supplied, forward to cbuildbot as '--chrome_version'.
          chrome_version=Single(basestring),
          # If True, add cbuildbot flag: '--debug'.
          debug=Single(bool, empty_val=CBB_DEBUG),
          # If True, add cbuildbot flag: '--clobber'.
          clobber=Single(bool, empty_val=CBB_CLOBBER),
          # The (optional) configuration repository to use.
          config_repo=Single(basestring),
          # If set, supply the "--git-cache-dir" option with this value.
          git_cache_dir=Single(basestring),
          # If supplied, forward to cbuildbot as '--buildbucket-id'
          buildbucket_id=Single(basestring, empty_val=CBB_BUILDBUCKET_ID),
          # Extra arguments passed to cbuildbot.
          extra_args=List(basestring),
      ),
      # If "chromite_branch" includes a branch version, this will be set to
      # the version value. Otherwise, this will be None.
      #
      # Set in "base".
      branch_version=Single(int),
  )
  if CBB_EXTRA_ARGS:
    cgrp.cbb.extra_args = CBB_EXTRA_ARGS
  return cgrp
def BaseConfig(USE_MIRROR=True, CACHE_DIR=None, BUILDSPEC_VERSION=None,
               **_kwargs):
  """Schema for the gclient module configuration."""
  cache_dir = str(CACHE_DIR) if CACHE_DIR else None
  return ConfigGroup(
      # One entry per gclient "solution" (top-level checkout).
      solutions=ConfigList(lambda: ConfigGroup(
          name=Single(basestring),
          url=Single((basestring, type(None)), empty_val=''),
          deps_file=Single(basestring, empty_val='.DEPS.git', required=False,
                           hidden=False),
          managed=Single(bool, empty_val=True, required=False, hidden=False),
          custom_deps=Dict(value_type=(basestring, types.NoneType)),
          custom_vars=Dict(value_type=(basestring, types.BooleanType)),
          safesync_url=Single(basestring, required=False),
          revision=Single((basestring, gclient_api.RevisionResolver),
                          required=False, hidden=True),
      )),
      deps_os=Dict(value_type=basestring),
      hooks=List(basestring),
      target_os=Set(basestring),
      target_os_only=Single(bool, empty_val=False, required=False),
      target_cpu=Set(basestring),
      target_cpu_only=Single(bool, empty_val=False, required=False),
      cache_dir=Static(cache_dir, hidden=False),
      # If supplied, use this as the source root (instead of the first
      # solution's checkout).
      src_root=Single(basestring, required=False, hidden=True),
      # Maps 'solution' -> build_property
      # TODO(machenbach): Deprecate this in favor of the one below.
      # http://crbug.com/713356
      got_revision_mapping=Dict(hidden=True),
      # Maps build_property -> 'solution'
      got_revision_reverse_mapping=Dict(hidden=True),
      # Addition revisions we want to pass in. For now theres a duplication
      # of code here of setting custom vars AND passing in --revision. We
      # hope to remove custom vars later.
      revisions=Dict(value_type=(basestring, gclient_api.RevisionResolver),
                     hidden=True),
      # TODO(iannucci): HACK! The use of None here to indicate that we apply
      # this to the solution.revision field is really terrible. I mostly
      # blame gclient.
      # Maps 'parent_build_property' -> 'custom_var_name'
      # Maps 'parent_build_property' -> None
      # If value is None, the property value will be applied to
      # solutions[0].revision. Otherwise, it will be applied to
      # solutions[0].custom_vars['custom_var_name']
      parent_got_revision_mapping=Dict(hidden=True),
      delete_unversioned_trees=Single(bool, empty_val=True, required=False),
      # Maps canonical repo URL to (local_path, revision).
      # - canonical gitiles repo URL is "https://<host>/<project>"
      #   where project does not have "/a/" prefix or ".git" suffix.
      # - solution/path is then used to apply patches as patch root in
      #   bot_update.
      # - if revision is given, it's passed verbatim to bot_update for
      #   corresponding dependency. Otherwise (i.e. None), the patch will be
      #   applied on top of version pinned in DEPS.
      # This is essentially a whitelist of which repos inside a solution
      # can be patched automatically by bot_update based on
      # api.buildbucket.build.input.gerrit_changes[0].project
      # For example, if bare chromium solution has this entry in
      # repo_path_map
      #     'http://103.210.161.2:3232/angle/angle': (
      #       'src/third_party/angle', 'HEAD')
      # then a patch to Angle project can be applied to a chromium src's
      # checkout after first updating Angle's repo to its master's HEAD.
      repo_path_map=Dict(value_type=tuple, hidden=True),
      # Check out refs/branch-heads.
      # TODO (machenbach): Only implemented for bot_update atm.
      with_branch_heads=Single(bool, empty_val=False, required=False,
                               hidden=True),
      # Check out refs/tags.
      with_tags=Single(bool, empty_val=False, required=False, hidden=True),
      disable_syntax_validation=Single(bool, empty_val=False, required=False),
      USE_MIRROR=Static(bool(USE_MIRROR)),
      BUILDSPEC_VERSION=Static(BUILDSPEC_VERSION, hidden=True),
  )
PROPERTIES = {
    'workflow': Property(
        kind=str,
        help=('Path to the dataflow workflow you would like to '
              'execute. Will be appended to the infra checkout path. '
              'The path should begin with "packages/dataflow".')),
    'job_name': Property(
        kind=str,
        help=('Name that appears on the Dataflow console. Must match '
              'the regular expression [a-z]([-a-z0-9]{0,38}[a-z0-9])')),
    'gcp_project_id': Property(
        kind=str,
        help=('Name of Google Cloud Project under which the Dataflow '
              'job will be executed.')),
    'num_workers': Property(
        kind=Single((int, float)),
        default=3,
        help=('Number of GCE instances used to run job.')),
    'timeout': Property(
        kind=Single((int, float)),
        default=300,
        help=('Timeout, in seconds.')),
}

# The dataflow-launcher service account to be used with apache beam framework
# can only come in the form of refresh token (the framework doesn't allow for
# custom authentication mechanism, so we can't make use of ambient LUCI auth).
# Thus, we store it encrypted with Google Cloud KMS in assets/dataflow-launcher
# file, and when recipe runs it decrypts it using Cloud KMS. For this,
# the (LUCI) task service account under which the recipe is running must have
PROPERTIES = {
    'workflow': Property(
        kind=str,
        help=('Path to the dataflow workflow you would like to '
              'execute. Will be appended to the infra checkout path. '
              'The path should begin with "packages/dataflow".')),
    'job_name': Property(
        kind=str,
        help=('Name that appears on the Dataflow console. Must match '
              'the regular expression [a-z]([-a-z0-9]{0,38}[a-z0-9])')),
    'gcp_project_id': Property(
        kind=str,
        help=('Name of Google Cloud Project under which the Dataflow '
              'job will be executed.')),
    'num_workers': Property(
        kind=Single((int, float)),
        default=3,
        help=('Number of GCE instances used to run job.')),
    'timeout': Property(
        kind=Single((int, float)),
        default=300,
        help=('Timeout, in seconds.')),
}

# The dataflow-launcher service account to be used with apache beam framework
# can only come in the form of refresh token (the framework doesn't allow for
# custom authentication mechanism, so we can't make use of ambient LUCI auth).
# Thus, we store it encrypted with Google Cloud KMS in assets/dataflow-launcher
# file, and when recipe runs it decrypts it using Cloud KMS. For this,
# the (LUCI) task service account under which the recipe is running must have
# been granted decrypt rights in Cloud KMS.
def BaseConfig(CBB_CONFIG=None, CBB_BRANCH=None, CBB_BUILD_NUMBER=None,
               CBB_DEBUG=False, CBB_CLOBBER=False, **_kwargs):
  """Schema for the Chromite module configuration.

  The CBB_* keyword arguments seed the corresponding field defaults below.
  """
  return ConfigGroup(
      # Base mapping of repository key to repository name.
      repositories=Dict(value_type=Set(basestring)),
      # Checkout Chromite at this branch. "origin/" will be prepended.
      chromite_branch=Single(basestring, empty_val=CBB_BRANCH or 'master'),
      # Should the Chrome version be supplied to cbuildbot?
      use_chrome_version=Single(bool),
      # Should the CrOS manifest commit message be parsed and added to
      # 'cbuildbot' flags?
      read_cros_manifest=Single(bool),
      cbb=ConfigGroup(
          # The Chromite configuration to use.
          config=Single(basestring, empty_val=CBB_CONFIG),
          # The buildroot directory name to use.
          builddir=Single(basestring),
          # If supplied, forward to cbuildbot as '--master-build-id'.
          build_id=Single(basestring),
          # If supplied, forward to cbuildbot as '--buildnumber'.
          build_number=Single(int, empty_val=CBB_BUILD_NUMBER),
          # If supplied, forward to cbuildbot as '--chrome-rev'.
          chrome_rev=Single(basestring),
          # If supplied, forward to cbuildbot as '--chrome_version'.
          chrome_version=Single(basestring),
          # If True, add cbuildbot flag: '--debug'.
          debug=Single(bool, empty_val=CBB_DEBUG),
          # If True, add cbuildbot flag: '--clobber'.
          clobber=Single(bool, empty_val=CBB_CLOBBER),
          # The (optional) configuration repository to use.
          config_repo=Single(basestring),
          # This disables Chromite bootstrapping by omitting the explicit
          # "--branch" argument.
          disable_bootstrap=Single(bool),
          # Whether this Chromite version supports warm cache.
          # https://chromium-review.googlesource.com/#/c/348011
          supports_repo_cache=Single(bool),
      ),
      # A list of branches whose Chromite version is "old". Old Chromite
      # buildbot commands reside in the "buildbot" subdirectory of the
      # Chromite repository instead of the "bin".
      old_chromite_branches=Set(basestring),
      # A list of branches whose builders should not use a shared buildroot.
      non_shared_root_branches=Set(basestring),
      # A list of branches whose builders checkout Chrome from SVN instead
      # of Git.
      chrome_svn_branches=Set(basestring),
      # Directory where a warm repo cache is stored. If set, and if the
      # current build supports a warm cache, this will be used to bootstrap
      # the Chromite checkout.
      repo_cache_dir=Single(basestring))
from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single

PROPERTIES = {
    '$gn/macos_sdk': Property(
        help='Properties specifically for the macos_sdk module.',
        param_name='sdk_properties',
        kind=ConfigGroup(
            # pylint: disable=line-too-long
            # XCode build version number. Internally maps to an XCode build
            # id like '9c40b'. See
            #
            # https://chrome-infra-packages.appspot.com/p/infra_internal/ios/xcode/mac/+/
            #
            # For an up to date list of the latest SDK builds.
            sdk_version=Single(str),
            # The CIPD toolchain tool package and version.
            tool_pkg=Single(str),
            tool_ver=Single(str),
        ),
        # NOTE(review): the default previously used keys 'tool_package' /
        # 'tool_version', which do not exist in the ConfigGroup schema above
        # ('tool_pkg' / 'tool_ver') and would fail validation when the
        # property is left unset. The keys now match the declared schema.
        default={
            'sdk_version': '12B5025f',
            'tool_pkg': 'infra/tools/mac_toolchain/${platform}',
            'tool_ver': 'git_revision:e9b1fe29fe21a1cd36428c43ea2aba244bd31280',
        },
    )
}
# BUG FIX: 'List' is used below (kind=List(dict)) but was not imported from
# recipe_engine.config, which raised NameError at module load time.
from recipe_engine.config import List, Single
from recipe_engine.recipe_api import Property

DEPS = [
    'buildbucket',
    'properties',
]

PROPERTIES = {
    'build_requests': Property(
        kind=List(dict),
        help='List of params to buildbucket.schedule_request for builds'
             ' to trigger.'),
    'collect_builds': Property(
        kind=Single(bool),
        default=False,
        help='Whether to wait for child builds and surface failures.'),
}


def RunSteps(api, build_requests, collect_builds):
  """Schedules the requested builds, optionally waiting for completion.

  Args:
    build_requests: list of kwargs dicts forwarded to
        api.buildbucket.schedule_request.
    collect_builds: when True, block on the scheduled builds and raise if
        any is unsuccessful; otherwise fire-and-forget.
  """
  builds_to_schedule = []
  for params in build_requests:
    builds_to_schedule.append(api.buildbucket.schedule_request(**params))
  if collect_builds:
    api.buildbucket.run(builds_to_schedule, raise_if_unsuccessful=True)
  else:
    api.buildbucket.schedule(builds_to_schedule)
'recipe_engine/path', 'recipe_engine/platform', 'recipe_engine/properties', 'recipe_engine/python', 'recipe_engine/raw_io', 'recipe_engine/runtime', 'recipe_engine/source_manifest', 'recipe_engine/step', 'tryserver', ] from recipe_engine.recipe_api import Property from recipe_engine.config import ConfigGroup, Single PROPERTIES = { # Gerrit patches will have all properties about them prefixed with patch_. 'deps_revision_overrides': Property(default={}), 'fail_patch': Property(default=None, kind=str), '$depot_tools/bot_update': Property( help='Properties specific to bot_update module.', param_name='properties', kind=ConfigGroup( # Whether we should do the patching in gclient instead of bot_update apply_patch_on_gclient=Single(bool), ), default={}, ), }
def BaseConfig(INTERNAL=False, REPO_NAME=None, REPO_URL=None,
               BUILD_CONFIG='Debug', REVISION='', **_kwargs):
  """Schema for the Android/Chromium checkout-and-test configuration."""
  return ConfigGroup(
      INTERNAL=Static(INTERNAL),
      REPO_NAME=Static(REPO_NAME),
      REPO_URL=Static(REPO_URL),
      BUILD_CONFIG=Static(BUILD_CONFIG),
      revision=Single(basestring, empty_val=REVISION),
      revisions=Dict(value_type=(basestring, types.NoneType)),
      asan_symbolize=Single(bool, required=False, empty_val=False),
      get_app_manifest_vars=Single(bool, required=False, empty_val=True),
      run_tree_truth=Single(bool, required=False, empty_val=True),
      deps_file=Single(basestring, required=False, empty_val='.DEPS.git'),
      internal_dir_name=Single(basestring, required=False),
      # deps_dir: where to checkout the gclient deps file
      deps_dir=Single(basestring, required=False, empty_val=REPO_NAME),
      managed=Single(bool, required=False, empty_val=True),
      extra_deploy_opts=List(inner_type=basestring),
      tests=List(inner_type=basestring),
      cr_build_android=Static(Path('[CHECKOUT]', 'build', 'android')),
      test_runner=Single(Path),
      gclient_custom_deps=Dict(value_type=(basestring, types.NoneType)),
      channel=Single(basestring, empty_val='chrome'),
      gclient_custom_vars=Dict(value_type=(basestring, types.NoneType)),
      coverage=Single(bool, required=False, empty_val=False),
      chrome_specific_wipe=Single(bool, required=False, empty_val=False),
      incremental_coverage=Single(bool, required=False, empty_val=False),
      env=ConfigGroup(
          LLVM_FORCE_HEAD_REVISION=Single(basestring, required=False),
      ),
  )
def BaseConfig(USE_MIRROR=True, GIT_MODE=False, CACHE_DIR=None,
               PATCH_PROJECT=None, BUILDSPEC_VERSION=None, **_kwargs):
  """Schema for the gclient module configuration (git/svn era)."""
  # GIT_MODE selects the git DEPS file and enables the git cache.
  deps = '.DEPS.git' if GIT_MODE else 'DEPS'
  cache_dir = str(CACHE_DIR) if GIT_MODE and CACHE_DIR else None
  return ConfigGroup(
      # One entry per gclient "solution" (top-level checkout).
      solutions=ConfigList(
          lambda: ConfigGroup(
              name=Single(basestring),
              url=Single(basestring),
              deps_file=Single(basestring, empty_val=deps, required=False,
                               hidden=False),
              managed=Single(bool, empty_val=True, required=False,
                             hidden=False),
              custom_deps=Dict(value_type=(basestring, types.NoneType)),
              custom_vars=Dict(value_type=basestring),
              safesync_url=Single(basestring, required=False),
              revision=Single(
                  (basestring, gclient_api.RevisionResolver),
                  required=False, hidden=True),
          )
      ),
      deps_os=Dict(value_type=basestring),
      hooks=List(basestring),
      target_os=Set(basestring),
      target_os_only=Single(bool, empty_val=False, required=False),
      cache_dir=Static(cache_dir, hidden=False),
      # If supplied, use this as the source root (instead of the first
      # solution's checkout).
      src_root=Single(basestring, required=False, hidden=True),
      # Maps 'solution' -> build_property
      got_revision_mapping=Dict(hidden=True),
      # Addition revisions we want to pass in. For now theres a duplication
      # of code here of setting custom vars AND passing in --revision. We
      # hope to remove custom vars later.
      revisions=Dict(
          value_type=(basestring, gclient_api.RevisionResolver),
          hidden=True),
      # TODO(iannucci): HACK! The use of None here to indicate that we apply
      # this to the solution.revision field is really terrible. I mostly
      # blame gclient.
      # Maps 'parent_build_property' -> 'custom_var_name'
      # Maps 'parent_build_property' -> None
      # If value is None, the property value will be applied to
      # solutions[0].revision. Otherwise, it will be applied to
      # solutions[0].custom_vars['custom_var_name']
      parent_got_revision_mapping=Dict(hidden=True),
      delete_unversioned_trees=Single(bool, empty_val=True, required=False),
      # Check out refs/branch-heads.
      # TODO (machenbach): Only implemented for bot_update atm.
      with_branch_heads=Single(
          bool, empty_val=False, required=False, hidden=True),
      GIT_MODE=Static(bool(GIT_MODE)),
      USE_MIRROR=Static(bool(USE_MIRROR)),
      PATCH_PROJECT=Static(str(PATCH_PROJECT), hidden=True),
      BUILDSPEC_VERSION=Static(BUILDSPEC_VERSION, hidden=True),
  )
def BaseConfig(**_kwargs):
  """Schema for this module's configuration: only the repo checkout path."""
  return ConfigGroup(
      repo_location=Single(Path),
  )
'path', 'platform', 'properties', 'service_account', 'step', ] PROPERTIES = { 'use_pkg': Property(default=False, kind=bool), 'pkg_files': Property(default=(), kind=List(str)), 'pkg_dirs': Property(default=(), kind=ConfigList(lambda: ConfigGroup( path=Single(str), exclusions=List(str), ))), 'pkg_vars': Property(default=None, kind=dict), 'ver_files': Property(default=(), kind=List(str)), 'install_mode': Property(default=None), 'refs': Property(default=['fake-ref-1', 'fake-ref-2'], kind=List(str)), 'tags': Property(kind=dict, default={ 'fake_tag_1': 'fake_value_1', 'fake_tag_2': 'fake_value_2'
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

DEPS = [
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/step',
]

from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single

PROPERTIES = {
    '$depot_tools/windows_sdk': Property(
        help='Properties specifically for the infra windows_sdk module.',
        param_name='sdk_properties',
        kind=ConfigGroup(
            # CIPD instance ID, tag or ref for the Windows SDK version.
            version=Single(str),
        ),
        default={'version': 'uploaded:2018-06-13'},
    )
}
def BaseConfig(HOST_PLATFORM, HOST_ARCH, HOST_BITS, TARGET_PLATFORM,
               TARGET_ARCH, TARGET_BITS, BUILD_CONFIG, TARGET_CROS_BOARD,
               BUILD_PATH, CHECKOUT_PATH, **_kwargs):
  """Schema for the chromium module's build configuration.

  The *_PLATFORM/*_ARCH/*_BITS/BUILD_CONFIG arguments are validated against
  the module-level allowed-value sets via check() and frozen as Statics.
  """
  # Renders a (key, value) pair as a shell-quoted KEY=value string; used as
  # the item formatter for the GYP_DEFINES / GYP_GENERATOR_FLAGS dicts.
  equal_fn = lambda tup: ('%s=%s' % (tup[0], pipes.quote(str(tup[1]))))
  return ConfigGroup(
      # Options controlling the compile step.
      compile_py=ConfigGroup(
          default_targets=Set(basestring),
          build_args=List(basestring),
          compiler=Single(basestring, required=False),
          mode=Single(basestring, required=False),
          goma_dir=Single(Path, required=False),
          goma_canary=Single(bool, empty_val=False, required=False),
          show_ninja_stats=Single(bool, empty_val=False, required=False),
          goma_hermetic=Single(basestring, required=False),
          goma_enable_remote_link=Single(bool, empty_val=False,
                                         required=False),
          goma_store_local_run_output=Single(bool, empty_val=False,
                                             required=False),
          goma_failfast=Single(bool, empty_val=False, required=False),
          goma_max_active_fail_fallback_tasks=Single(int, empty_val=None,
                                                     required=False),
          goma_enable_localoutputcache=Single(bool, empty_val=False,
                                              required=False),
          xcode_sdk=Single(basestring, required=False),
          ninja_confirm_noop=Single(bool, empty_val=False, required=False),
          set_build_data_dir=Single(bool, empty_val=False, required=False),
          goma_high_parallel=Single(bool, empty_val=False, required=False),
      ),
      runtest_py=ConfigGroup(
          src_side=Single(bool),
      ),
      # Environment variables consumed by gyp.
      gyp_env=ConfigGroup(
          DOWNLOAD_VR_TEST_APKS=Single(int, required=False),
          GYP_CROSSCOMPILE=Single(int, jsonish_fn=str, required=False),
          GYP_CHROMIUM_NO_ACTION=Single(int, jsonish_fn=str, required=False),
          GYP_DEFINES=Dict(equal_fn, ' '.join, (basestring, int, Path)),
          GYP_GENERATORS=Set(basestring, ','.join),
          GYP_GENERATOR_FLAGS=Dict(equal_fn, ' '.join, (basestring, int)),
          GYP_INCLUDE_LAST=Single(Path, required=False),
          GYP_LINK_CONCURRENCY=Single(int, required=False),
          GYP_MSVS_VERSION=Single(basestring, required=False),
          GYP_USE_SEPARATE_MSPDBSRV=Single(int, jsonish_fn=str,
                                           required=False),
          LLVM_DOWNLOAD_GOLD_PLUGIN=Single(int, required=False),
      ),
      # Generic process environment overrides.
      env=ConfigGroup(
          PATH=List(Path),
          ADB_VENDOR_KEYS=Single(Path, required=False),
          LLVM_FORCE_HEAD_REVISION=Single(basestring, required=False),
          GOMA_STUBBY_PROXY_IP_ADDRESS=Single(basestring, required=False),
          FORCE_MAC_TOOLCHAIN=Single(int, required=False),
          FORCE_MAC_TOOLCHAIN_REVISION_OVERRIDE=Single(basestring,
                                                       required=False),
      ),
      project_generator=ConfigGroup(
          tool=Single(basestring, empty_val='gyp'),
          args=Set(basestring),
      ),
      build_dir=Single(Path),
      cros_sdk=ConfigGroup(
          external=Single(bool, empty_val=True, required=False),
          args=List(basestring),
      ),
      # Options controlling the test-running steps.
      runtests=ConfigGroup(
          enable_memcheck=Single(bool, empty_val=False, required=False),
          memory_tests_runner=Single(Path),
          enable_lsan=Single(bool, empty_val=False, required=False),
          test_args=List(basestring),
          run_asan_test=Single(bool, required=False),
          swarming_extra_args=List(basestring),
          swarming_tags=Set(basestring),
      ),
      # Some platforms do not have a 1:1 correlation of BUILD_CONFIG to what
      # is passed as --target on the command line.
      build_config_fs=Single(basestring),
      BUILD_CONFIG=Static(check(BUILD_CONFIG, BUILD_CONFIGS)),
      HOST_PLATFORM=Static(check(HOST_PLATFORM, HOST_PLATFORMS)),
      HOST_ARCH=Static(check(HOST_ARCH, HOST_ARCHS)),
      HOST_BITS=Static(check(HOST_BITS, HOST_TARGET_BITS)),
      TARGET_PLATFORM=Static(check(TARGET_PLATFORM, TARGET_PLATFORMS)),
      TARGET_ARCH=Static(check(TARGET_ARCH, TARGET_ARCHS)),
      TARGET_BITS=Static(check(TARGET_BITS, HOST_TARGET_BITS)),
      TARGET_CROS_BOARD=Static(TARGET_CROS_BOARD),
      BUILD_PATH=Static(BUILD_PATH),
      CHECKOUT_PATH=Static(CHECKOUT_PATH),
      gn_args=List(basestring),
      lto=Single(bool, empty_val=False, required=False),
      clobber_before_runhooks=Single(bool, empty_val=False, required=False,
                                     hidden=False),
  )
def BaseConfig(USE_MIRROR=True, GIT_MODE=False, CACHE_DIR=None,
               PATCH_PROJECT=None, BUILDSPEC_VERSION=None, **_kwargs):
  """Schema for the gclient module configuration (patch_projects era)."""
  # GIT_MODE selects the git DEPS file; the cache is used whenever set.
  deps = '.DEPS.git' if GIT_MODE else 'DEPS'
  cache_dir = str(CACHE_DIR) if CACHE_DIR else None
  return ConfigGroup(
      # One entry per gclient "solution" (top-level checkout).
      solutions=ConfigList(lambda: ConfigGroup(
          name=Single(basestring),
          url=Single(basestring),
          deps_file=Single(
              basestring, empty_val=deps, required=False, hidden=False),
          managed=Single(bool, empty_val=True, required=False, hidden=False),
          custom_deps=Dict(value_type=(basestring, types.NoneType)),
          custom_vars=Dict(value_type=basestring),
          safesync_url=Single(basestring, required=False),
          revision=Single((basestring, gclient_api.RevisionResolver),
                          required=False, hidden=True),
      )),
      deps_os=Dict(value_type=basestring),
      hooks=List(basestring),
      target_os=Set(basestring),
      target_os_only=Single(bool, empty_val=False, required=False),
      cache_dir=Static(cache_dir, hidden=False),
      # If supplied, use this as the source root (instead of the first
      # solution's checkout).
      src_root=Single(basestring, required=False, hidden=True),
      # Maps 'solution' -> build_property
      got_revision_mapping=Dict(hidden=True),
      # Addition revisions we want to pass in. For now theres a duplication
      # of code here of setting custom vars AND passing in --revision. We
      # hope to remove custom vars later.
      revisions=Dict(value_type=(basestring, gclient_api.RevisionResolver),
                     hidden=True),
      # TODO(iannucci): HACK! The use of None here to indicate that we apply
      # this to the solution.revision field is really terrible. I mostly
      # blame gclient.
      # Maps 'parent_build_property' -> 'custom_var_name'
      # Maps 'parent_build_property' -> None
      # If value is None, the property value will be applied to
      # solutions[0].revision. Otherwise, it will be applied to
      # solutions[0].custom_vars['custom_var_name']
      parent_got_revision_mapping=Dict(hidden=True),
      delete_unversioned_trees=Single(bool, empty_val=True, required=False),
      # Maps patch_project to (solution/path, revision).
      # - solution/path is then used to apply patches as patch root in
      #   bot_update.
      # - if revision is given, it's passed verbatim to bot_update for
      #   corresponding dependency. Otherwise (ie None), the patch will be
      #   applied on top of version pinned in DEPS.
      # This is essentially a whitelist of which projects inside a solution
      # can be patched automatically by bot_update based on PATCH_PROJECT
      # property.
      # For example, bare chromium solution has this entry in patch_projects
      #     'angle/angle': ('src/third_party/angle', 'HEAD')
      # then a patch to Angle project can be applied to a chromium src's
      # checkout after first updating Angle's repo to its master's HEAD.
      patch_projects=Dict(value_type=tuple, hidden=True),
      # Check out refs/branch-heads.
      # TODO (machenbach): Only implemented for bot_update atm.
      with_branch_heads=Single(bool, empty_val=False, required=False,
                               hidden=True),
      GIT_MODE=Static(bool(GIT_MODE)),
      USE_MIRROR=Static(bool(USE_MIRROR)),
      # TODO(tandrii): remove PATCH_PROJECT field.
      # DON'T USE THIS. WILL BE REMOVED.
      PATCH_PROJECT=Static(str(PATCH_PROJECT), hidden=True),
      BUILDSPEC_VERSION=Static(BUILDSPEC_VERSION, hidden=True),
  )
# Copyright 2018 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

DEPS = [
]

from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single

PROPERTIES = {
    '$recipe_engine/random': Property(
        help='Properties to control the `random` module.',
        param_name='module_properties',
        kind=ConfigGroup(
            # help='A seed to be passed to random.'
            seed=Single(int, required=False),
        ),
        default={},
    )
}
'recipe_engine/path', 'recipe_engine/platform', 'recipe_engine/step', 'recipe_engine/version', ] from recipe_engine.recipe_api import Property from recipe_engine.config import ConfigGroup, Single PROPERTIES = { '$depot_tools/osx_sdk': Property( help='Properties specifically for the infra osx_sdk module.', param_name='sdk_properties', kind=ConfigGroup( # pylint: disable=line-too-long # XCode build version number. Internally maps to an XCode build id like # '9c40b'. See # # https://chrome-infra-packages.appspot.com/p/infra_internal/ios/xcode/mac/+/ # # For an up to date list of the latest SDK builds. sdk_version=Single(str), # The CIPD toolchain tool package and version toolchain_pkg=Single(str), toolchain_ver=Single(str), ), default={}, ) }
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

DEPS = [
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/step',
]

from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single

PROPERTIES = {
    '$gn/windows_sdk': Property(
        help='Properties specifically for the windows_sdk module.',
        param_name='sdk_properties',
        kind=ConfigGroup(
            # The CIPD package and version.
            sdk_package=Single(str),
            sdk_version=Single(str)),
        default={
            'sdk_package': 'chrome_internal/third_party/sdk/windows',
            'sdk_version': 'uploaded:2019-09-06'
        },
    )
}
help='The target tester to match test config to. If the tests are run ' 'on a builder, just treat the builder as a tester.'), 'good_revision': Property(kind=str, help='The last known good revision.'), 'bad_revision': Property(kind=str, help='The first known good revision.'), 'tests': Property( kind=Dict(value_type=list), help='The failed tests, the test name should be full name, e.g.: {' ' "browser_tests": [' ' "suite.test1", "suite.test2"' ' ]' '}'), 'use_analyze': Property(kind=Single(bool, empty_val=False, required=False), default=True, help='Use analyze to skip commits that do not affect tests.'), } class TestResult(object): SKIPPED = 'skipped' # A commit doesn't impact the test. PASSED = 'passed' # The compile or test passed. FAILED = 'failed' # The compile or test failed. def _compile_and_test_at_revision(api, target_mastername, target_buildername, target_testername, revision, requested_tests, use_analyze): results = {}
def BaseConfig(**_kwargs):
  """Schema for this module's configuration: pool name and API key sources."""
  return ConfigGroup(
      pool=Single(basestring),
      # URLs from which the API key/secret files are fetched.
      api_key_file_url=Single(basestring),
      api_secret_file_url=Single(basestring),
  )
def BaseConfig(**kwargs):
  """Schema for this module's configuration: a single feature flag."""
  return ConfigGroup(
      # Toggles the new code path.
      use_new_logic=Single(bool),
  )
def BaseConfig(**_kwargs):
  """Schema for this module's configuration: the test results server host."""
  return ConfigGroup(
      test_results_server=Single(basestring),
  )