def _protoc_direct_dependencies_rules(self):
    """Append protoc '--direct_dependencies' flags when enabled in config.

    Emits nothing when 'protoc_direct_dependencies' is off in
    proto_library_config.
    """
    if config.get_item('proto_library_config', 'protoc_direct_dependencies'):
        dependencies = self.protoc_direct_dependencies()
        # Well-known protos are always allowed as direct dependencies.
        dependencies += config.get_item('proto_library_config', 'well_known_protos')
        env_name = self._env_name()
        self._write_rule('%s.Append(PROTOCFLAGS="--direct_dependencies %s")' % (
            env_name, ':'.join(dependencies)))
def _proto_go_rules(self):
    """Generate go files.

    For each .proto source, emits a ProtoGo rule to generate the .pb.go
    file and a ProtoGoSource rule to copy it under $GOPATH/src following
    the standard go directory layout.
    """
    env_name = self._env_name()
    var_name = self._var_name('go')
    go_home = config.get_item('go_config', 'go_home')
    if not go_home:
        console.error_exit('%s: go_home is not configured in BLADE_ROOT.' % self.fullname)
    proto_go_path = config.get_item('proto_library_config', 'protobuf_go_path')
    self._write_rule('%s.Replace(PROTOBUFGOPATH="%s")' % (env_name, proto_go_path))
    self._write_rule('%s = []' % var_name)
    for src in self.srcs:
        proto_src = os.path.join(self.path, src)
        go_src = self._proto_gen_go_file(src)
        go_src_var = self._var_name_of(src, 'go_src')
        self._write_rule('%s = %s.ProtoGo("%s", "%s")' % (
            go_src_var, env_name, go_src, proto_src))
        # Copy the generated go sources to $GOPATH
        # according to the standard go directory layout
        proto_dir = os.path.dirname(src)
        proto_name = os.path.basename(src)
        # The destination package directory is derived from the proto file
        # name with dots replaced ('foo.proto' -> 'foo_proto').
        go_dst = os.path.join(go_home, 'src', proto_go_path, self.path,
                              proto_dir, proto_name.replace('.', '_'),
                              os.path.basename(go_src))
        go_dst_var = self._var_name_of(src, 'go_dst')
        self._write_rule('%s = %s.ProtoGoSource("%s", %s)' % (
            go_dst_var, env_name, go_dst, go_src_var))
        self._write_rule('%s.append(%s)' % (var_name, go_dst_var))
    self._add_target_var('go', var_name)
def ninja_proto_go_rules(self, plugin_flags):
    """Emit ninja 'protogo' build rules for each .proto source.

    The generated .pb.go is placed under $GOPATH/src/<go_package>/.
    Warns when a proto's go_package does not live under the configured
    protobuf_go_path.
    """
    go_home = config.get_item('go_config', 'go_home')
    protobuf_go_path = config.get_item('proto_library_config', 'protobuf_go_path')
    generated_goes = []
    for src in self.srcs:
        path = self._source_file_path(src)
        package = self._get_go_package_name(path)
        if not package.startswith(protobuf_go_path):
            console.warning('%s: go_package "%s" is not starting with "%s" in %s' %
                            (self.fullname, package, protobuf_go_path, src))
        basename = os.path.basename(src)
        # basename[:-6] strips the '.proto' suffix (6 chars).
        output = os.path.join(go_home, 'src', package, '%s.pb.go' % basename[:-6])
        self.ninja_build(output, 'protogo', inputs=path)
        generated_goes.append(output)
    self._add_target_file('gopkg', generated_goes)
def _generate_java_source_encoding(self):
    """Emit a JAVACFLAGS '-encoding' rule for this target.

    The per-target 'source_encoding' attribute takes precedence; when it
    is absent the global java_config value is used. Nothing is emitted
    when neither is configured.
    """
    encoding = self.data.get('source_encoding')
    if encoding is None:
        encoding = config.get_item('java_config', 'source_encoding')
    if not encoding:
        return
    self._write_rule('%s.Append(JAVACFLAGS="-encoding %s")' % (
        self._env_name(), encoding))
def _generate_scala_source_encoding(self):
    """Emit a SCALACFLAGS '-encoding' rule for this target.

    Target-level 'source_encoding' wins over the scala_config default;
    nothing is emitted when both are unset.
    """
    encoding = self.data.get('source_encoding') or config.get_item(
        'scala_config', 'source_encoding')
    if encoding:
        self._write_rule('%s.Append(SCALACFLAGS=["-encoding %s"])' % (
            self._env_name(), encoding))
def gen_targets_rules(self):
    """Get the build rules and return to the object who queries this.

    Iterates targets in sorted (dependency) order, skipping tests when
    --no-test was given and packages unless --generate-package was given
    (unless directly requested on the command line), and collects the
    rules produced by each target's backend generator.
    """
    rules_buf = []
    skip_test = getattr(self.__options, 'no_test', False)
    skip_package = not getattr(self.__options, 'generate_package', False)
    native_builder = config.get_item('global_config', 'native_builder')
    for k in self.__sorted_targets_keys:
        target = self.__build_targets[k]
        if not self._is_scons_object_type(target.type):
            continue
        blade_object = self.__target_database.get(k, None)
        if not blade_object:
            console.warning('not registered blade object, key %s' % str(k))
            continue
        # Tests/packages are only skipped when not directly requested.
        if (skip_test and target.type.endswith('_test') and
                k not in self.__direct_targets):
            continue
        if (skip_package and target.type == 'package' and
                k not in self.__direct_targets):
            continue
        if native_builder == 'ninja':
            blade_object.ninja_rules()
        else:
            blade_object.scons_rules()
        rules = blade_object.get_rules()
        if rules:
            rules_buf.append('\n')
            rules_buf += rules
    return rules_buf
def _generate_scala_warnings(self):
    """Emit the SCALACFLAGS warning flag for this target.

    Resolution order: target-level 'warnings', then the scala_config
    default, then '-nowarn' as the final fallback.
    """
    flags = (self.data.get('warnings')
             or config.get_item('scala_config', 'warnings')
             or '-nowarn')
    self._write_rule('%s.Append(SCALACFLAGS=["%s"])' % (
        self._env_name(), flags))
def _dump_compdb(options, output_file_name):
    """Dump the clang compilation database via 'ninja -t compdb'.

    Only works with the ninja native builder; redirects ninja's stdout
    into output_file_name through the shell.
    """
    native_builder = config.get_item('global_config', 'native_builder')
    if native_builder != 'ninja':
        console.error_exit('dump compdb only work when native_builder is ninja')
    rules = build_manager.instance.get_all_rule_names()
    cmd = ['ninja', '-t', 'compdb'] + rules
    cmdstr = subprocess.list2cmdline(cmd)
    # The output redirection is performed by the shell.
    cmdstr += ' > '
    cmdstr += output_file_name
    return _run_native_builder(cmdstr)
def __init__(self, name, srcs, deps, resources, source_encoding, warnings,
             testdata, kwargs):
    """Init the scala_test target.

    Builds on ScalaFatLibrary and hardcodes a dependency on the
    configured scalatest jars.
    """
    ScalaFatLibrary.__init__(self, name, srcs, deps, resources,
                             source_encoding, warnings, [], kwargs)
    self.type = 'scala_test'
    self.data['testdata'] = var_to_list(testdata)
    scalatest_libs = config.get_item('scala_test_config', 'scalatest_libs')
    if scalatest_libs:
        self._add_hardcode_java_library(scalatest_libs)
    else:
        # Missing scalatest config is tolerated; the test will likely
        # fail to compile, so just warn.
        console.warning('scalatest jar was not configured')
def _prepare_to_generate_rule(self):
    """Should be overridden.

    Common preparation for java targets: checks deprecated deps, clones
    the construction environment, and applies source encoding and
    warning flags (target-level 'warnings' overriding java_config).
    """
    self._check_deprecated_deps()
    self._clone_env()
    self._generate_java_source_encoding()
    warnings = self.data.get('warnings')
    if warnings is None:
        warnings = config.get_item('java_config', 'warnings')
    if warnings:
        self._write_rule('%s.Append(JAVACFLAGS=%s)' % (
            self._env_name(), warnings))
def setup_build_dir(options):
    """Create the build directory and the 'blade-bin' symlink to it.

    The directory name is expanded from global_config.build_path_template
    with the current bits/profile. Returns the build directory path.
    """
    build_path_format = config.get_item('global_config', 'build_path_template')
    s = Template(build_path_format)
    build_dir = s.substitute(bits=options.bits, profile=options.profile)
    if not os.path.exists(build_dir):
        # Use makedirs: the template may expand to a nested path
        # (e.g. 'build/64_release'), which os.mkdir cannot create.
        os.makedirs(build_dir)
    try:
        # Remove a stale symlink before recreating it; ignore absence.
        os.remove('blade-bin')
    except os.error:
        pass
    os.symlink(os.path.abspath(build_dir), 'blade-bin')
    return build_dir
def _prepare_env(self, target):
    """Prepare the test environment.

    Rebuilds the runfiles directory with symlinks to the build profile
    and prebuilt libraries, stages test data, and returns the process
    environment (LD_LIBRARY_PATH, optional PPROF/JAVA paths) for the run.
    """
    runfiles_dir = self._runfiles_dir(target)
    # Start from a clean runfiles directory every run.
    shutil.rmtree(runfiles_dir, ignore_errors=True)
    os.mkdir(runfiles_dir)
    # Build profile symlink
    profile_link_name = os.path.basename(self.build_dir)
    os.symlink(os.path.abspath(self.build_dir),
               os.path.join(runfiles_dir, profile_link_name))
    # Prebuilt library symlink
    for prebuilt_file in self._get_prebuilt_files(target):
        src = os.path.abspath(prebuilt_file[0])
        dst = os.path.join(runfiles_dir, prebuilt_file[1])
        if os.path.lexists(dst):
            console.warning('trying to make duplicate prebuilt symlink:\n'
                            '%s -> %s\n'
                            '%s -> %s already exists\n'
                            'skipped, should check duplicate prebuilt '
                            'libraries' % (dst, src, dst, os.path.realpath(dst)))
            continue
        os.symlink(src, dst)
    self._prepare_test_data(target)
    run_env = dict(os.environ)
    environ_add_path(run_env, 'LD_LIBRARY_PATH', runfiles_dir)
    run_lib_paths = config.get_item('cc_binary_config', 'run_lib_paths')
    if run_lib_paths:
        for path in run_lib_paths:
            if path.startswith('//'):
                # '//' marks a workspace-root-relative path.
                path = path[2:]
            path = os.path.abspath(path)
            environ_add_path(run_env, 'LD_LIBRARY_PATH', path)
    java_home = config.get_item('java_config', 'java_home')
    if java_home:
        java_home = os.path.abspath(java_home)
        environ_add_path(run_env, 'PATH', os.path.join(java_home, 'bin'))
    return run_env
def __init__(self, options, build_dir, build_platform, build_environment,
             svn_roots):
    """Init the rules generator.

    Captures toolchain information from build_platform and creates the
    CcFlagsManager used when emitting cc rules.
    """
    self.rules_buf = []
    self.options = options
    self.build_dir = build_dir
    self.cc = build_platform.get_cc()
    self.cc_version = build_platform.get_cc_version()
    self.python_inc = build_platform.get_python_include()
    self.cuda_inc = build_platform.get_cuda_include()
    self.build_environment = build_environment
    self.ccflags_manager = CcFlagsManager(options, build_dir, build_platform)
    self.svn_roots = svn_roots
    self.distcc_enabled = config.get_item('distcc_config', 'enabled')
def generate_scalatest_rule(self, java_config):
    """Emit the ninja 'scalatest' rule used to run scala tests.

    Uses the scala binary from scala_config.scala_home when configured,
    otherwise relies on 'scala' being on PATH.
    """
    java = self.get_java_command(java_config, 'java')
    scala = 'scala'
    scala_home = config.get_item('scala_config', 'scala_home')
    if scala_home:
        scala = os.path.join(scala_home, 'bin', scala)
    jacocoagent = self.get_jacocoagent()
    args = ('--java=%s --scala=%s --jacocoagent=%s '
            '--packages_under_test=${packages_under_test} '
            '--script=${out} ${in}') % (java, scala, jacocoagent)
    self.generate_rule(name='scalatest',
                       command=self._builtin_command('scala_test', args),
                       description='SCALA TEST ${out}')
def generate_java_test_rules(self):
    """Emit the ninja 'javatest' rule used to run java tests.

    When jacoco_home is configured, the rule command is prefixed with the
    JACOCOAGENT environment variable for coverage collection.
    """
    jacoco_home = config.get_item('java_test_config', 'jacoco_home')
    if jacoco_home:
        jacoco_agent = os.path.join(jacoco_home, 'lib', 'jacocoagent.jar')
        prefix = 'JACOCOAGENT=%s' % jacoco_agent
    else:
        prefix = ''
    self._add_rule('javatargetundertestpkg = __targetundertestpkg__')
    args = '${mainclass} ${javatargetundertestpkg} ${out} ${in}'
    self.generate_rule(name='javatest',
                       command=self.generate_toolchain_command(
                           'java_test', prefix=prefix, suffix=args),
                       description='JAVA TEST ${out}')
def __init__(self, options, build_dir, gcc_version, python_inc, cuda_inc,
             build_environment, svn_roots):
    """Init the rules generator with explicit toolchain parameters."""
    self.rules_buf = []
    self.options = options
    self.build_dir = build_dir
    self.gcc_version = gcc_version
    self.python_inc = python_inc
    self.cuda_inc = cuda_inc
    self.build_environment = build_environment
    self.ccflags_manager = CcFlagsManager(options, build_dir, gcc_version)
    self.svn_roots = svn_roots
    self.distcc_enabled = config.get_item('distcc_config', 'enabled')
def generate_go_rules(self):
    """Emit ninja rules for go package/command/test builds.

    All go rules share a depth-1 pool because the go tool manages its own
    parallelism and concurrent invocations can conflict. Rules are only
    emitted when both go_home and go are configured.
    """
    go_home = config.get_item('go_config', 'go_home')
    go = config.get_item('go_config', 'go')
    if go_home and go:
        go_pool = 'golang_pool'
        # NOTE(review): the original newlines inside this literal were
        # lost in transit; reconstructed as a standard ninja pool decl.
        self._add_rule('''pool %s
  depth = 1
''' % go_pool)
        go_path = os.path.normpath(os.path.abspath(go_home))
        prefix = 'GOPATH=%s %s' % (go_path, go)
        self.generate_rule(name='gopackage',
                           command='%s install ${extra_goflags} ${package}' % prefix,
                           description='GOLANG PACKAGE ${package}',
                           pool=go_pool)
        self.generate_rule(name='gocommand',
                           command='%s build -o ${out} ${extra_goflags} ${package}' % prefix,
                           description='GOLANG COMMAND ${package}',
                           pool=go_pool)
        self.generate_rule(name='gotest',
                           command='%s test -c -o ${out} ${extra_goflags} ${package}' % prefix,
                           description='GOLANG TEST ${package}',
                           pool=go_pool)
def _proto_go_rules(self):
    """Generate go files.

    Go-module-aware variant: when go modules are enabled and no module
    relpath is set, generated sources are placed relative to the module
    path instead of under $GOPATH/src.
    """
    env_name = self._env_name()
    var_name = self._var_name('go')
    go_home = config.get_item('go_config', 'go_home')
    if not go_home:
        console.error_exit('%s: go_home is not configured in BLADE_ROOT.' % self.fullname)
    proto_go_path = config.get_item('proto_library_config', 'protobuf_go_path')
    go_module_enabled = config.get_item('go_config', 'go_module_enabled')
    go_module_relpath = config.get_item('go_config', 'go_module_relpath')
    self._write_rule('%s.Replace(PROTOBUFGOPATH="%s")' % (env_name, proto_go_path))
    self._write_rule('%s = []' % var_name)
    for src in self.srcs:
        proto_src = os.path.join(self.path, src)
        go_src = self._proto_gen_go_file(src)
        go_src_var = self._var_name_of(src, 'go_src')
        self._write_rule('%s = %s.ProtoGo("%s", "%s")' % (go_src_var,
                                                          env_name, go_src, proto_src))
        # Copy the generated go sources to $GOPATH
        # according to the standard go directory layout
        proto_dir = os.path.dirname(src)
        proto_name = os.path.basename(src)
        if go_module_enabled and not go_module_relpath:
            # Module mode: no $GOPATH/src prefix.
            go_dst = os.path.join(proto_go_path, self.path, proto_dir,
                                  proto_name.replace('.', '_'),
                                  os.path.basename(go_src))
        else:
            go_dst = os.path.join(go_home, 'src', proto_go_path, self.path,
                                  proto_dir, proto_name.replace('.', '_'),
                                  os.path.basename(go_src))
        go_dst_var = self._var_name_of(src, 'go_dst')
        self._write_rule('%s = %s.ProtoGoSource("%s", %s)' % (go_dst_var,
                                                              env_name, go_dst, go_src_var))
        self._write_rule('%s.append(%s)' % (var_name, go_dst_var))
    self._add_target_var('go', var_name)
def clean(options):
    """Remove build outputs via the native builder's clean mode.

    Uses 'ninja -t clean' or scons '-c' depending on the configured
    native builder; returns the builder's exit code.
    """
    console.info('cleaning...(hint: please specify --generate-dynamic to '
                 'clean your so)')
    native_builder = config.get_item('global_config', 'native_builder')
    cmd = [native_builder]
    # cmd += native_builder_options(options)
    if native_builder == 'ninja':
        cmd += ['-t', 'clean']
    else:
        cmd += ['--duplicate=soft-copy', '-c', '-s', '--cache-show']
    cmdstr = subprocess.list2cmdline(cmd)
    returncode = _run_native_builder(cmdstr)
    console.info('cleaning done.')
    return returncode
def _set_go_package(self):
    """
    Set the package path from the source path inside the workspace
    specified by GOPATH. All the go sources of the same package
    should be in the same directory.
    """
    srcs = [self._source_file_path(s) for s in self.srcs]
    dirs = set([os.path.dirname(s) for s in srcs])
    if len(dirs) != 1:
        console.error_exit('%s: Go sources belonging to the same package '
                           'should be in the same directory. Sources: %s' %
                           (self.fullname, ', '.join(self.srcs)))
    go_home = config.get_item('go_config', 'go_home')
    # The go package is the target path relative to $GOPATH/src.
    self.data['go_package'] = os.path.relpath(self.path,
                                              os.path.join(go_home, 'src'))
def generate_cc_vars(self):
    """Emit global ninja variables for cc warning and optimize flags.

    'optimize' is only populated in release profile; 'optimize_flags' is
    always defined so targets marked `always_optimize` can reference it.
    """
    warnings, cxx_warnings, c_warnings = self._get_warning_flags()
    c_warnings += warnings
    cxx_warnings += warnings
    # optimize_flags is need for `always_optimize`
    optimize_flags = config.get_item('cc_config', 'optimize')
    optimize = '$optimize_flags' if self.options.profile == 'release' else ''
    self._add_rule(textwrap.dedent('''\
            c_warnings = %s
            cxx_warnings = %s
            optimize_flags = %s
            optimize = %s
            ''') % (' '.join(c_warnings), ' '.join(cxx_warnings),
                    ' '.join(optimize_flags), optimize))
def _dump_compdb(options, output_file_name):
    """Dump the clang compilation database via 'ninja -t compdb'.

    Newer backend_builder variant: passes the generated build script
    explicitly with '-f'. Only works with ninja.
    """
    backend_builder = config.get_item('global_config', 'backend_builder')
    if backend_builder != 'ninja':
        console.fatal('Dump compdb only work when backend_builder is ninja')
    rules = build_manager.instance.get_all_rule_names()
    cmd = [
        'ninja', '-f', build_manager.instance.build_script(), '-t', 'compdb'
    ]
    cmd += rules
    cmdstr = subprocess.list2cmdline(cmd)
    # Output redirection is done by the shell.
    cmdstr += ' > '
    cmdstr += output_file_name
    return _run_backend_builder(cmdstr)
def dump(options):
    """Handle the 'dump' subcommand.

    Currently only supports dumping the compilation database (--compdb),
    which requires the ninja native builder.
    """
    native_builder = config.get_item('global_config', 'native_builder')
    if native_builder != 'ninja':
        console.error_exit(
            'dump compdb only work when native_builder is ninja')
    if options.compdb:
        output_file_name = os.path.join(_WORKING_DIR, options.compdb_path)
        rules = build_manager.instance.get_all_rule_names()
        cmd = ['ninja', '-t', 'compdb'] + rules
        cmdstr = subprocess.list2cmdline(cmd)
        cmdstr += ' > '
        cmdstr += output_file_name
        console.info('write compdb to %s' % options.compdb_path)
        return _run_native_builder(cmdstr)
def generate_go_rules(self):
    """Emit ninja rules for go builds, with go-module support.

    In module mode the go command may need to run from the module root
    (go_module_relpath), in which case output paths are rewritten to be
    relative to that directory. All go rules share a depth-1 pool.
    """
    go_home = config.get_item('go_config', 'go_home')
    go = config.get_item('go_config', 'go')
    go_module_enabled = config.get_item('go_config', 'go_module_enabled')
    go_module_relpath = config.get_item('go_config', 'go_module_relpath')
    if go_home and go:
        go_pool = 'golang_pool'
        self._add_rule(textwrap.dedent('''\
                pool %s
                  depth = 1
                ''') % go_pool)
        go_path = os.path.normpath(os.path.abspath(go_home))
        out_relative = ""
        if go_module_enabled:
            prefix = go
            if go_module_relpath:
                relative_prefix = os.path.relpath(prefix, go_module_relpath)
                # Run the go tool from the module directory.
                prefix = "cd {go_module_relpath} && {relative_prefix}".format(
                    go_module_relpath=go_module_relpath,
                    relative_prefix=relative_prefix,
                )
                # add slash to the end of the relpath
                out_relative = os.path.join(
                    os.path.relpath("./", go_module_relpath), "")
        else:
            prefix = 'GOPATH=%s %s' % (go_path, go)
        self.generate_rule(name='gopackage',
                           command='%s install ${extra_goflags} ${package}' % prefix,
                           description='GO INSTALL ${package}',
                           pool=go_pool)
        self.generate_rule(name='gocommand',
                           command='%s build -o %s${out} ${extra_goflags} ${package}' % (
                               prefix, out_relative),
                           description='GO BUILD ${package}',
                           pool=go_pool)
        self.generate_rule(name='gotest',
                           command='%s test -c -o %s${out} ${extra_goflags} ${package}' % (
                               prefix, out_relative),
                           description='GO TEST ${package}',
                           pool=go_pool)
def verify(self):
    """Verify specific targets after build is complete.

    Checks header inclusion dependencies for built cc_library targets
    when enabled in cc_config; returns True when no violations found.
    """
    verify_history = self._load_verify_history()
    error = 0
    header_inclusion_dependencies = config.get_item('cc_config',
                                                    'header_inclusion_dependencies')
    header_inclusion_history = verify_history['header_inclusion_dependencies']
    for k in self.__sorted_targets_keys:
        target = self.__build_targets[k]
        if (header_inclusion_dependencies and
                target.type == 'cc_library' and target.srcs):
            if not target.verify_header_inclusion_dependencies(
                    header_inclusion_history):
                error += 1
    # Persist results so unchanged targets are not re-verified next time.
    self._dump_verify_history()
    return error == 0
def _init_go_environment(self):
    """Lazily detect GOOS/GOARCH by running 'go env' once per process.

    Results are cached on the GoTarget class so subsequent targets skip
    the subprocess call.
    """
    if GoTarget._go_os is None and GoTarget._go_arch is None:
        go = config.get_item('go_config', 'go')
        p = subprocess.Popen('%s env' % go,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True,
                             universal_newlines=True)
        stdout, stderr = p.communicate()
        if p.returncode:
            self.fatal('Failed to initialize go environment: %s' % stderr)
        for line in stdout.splitlines():
            # Values are printed quoted, e.g. GOOS="linux".
            if line.startswith('GOOS='):
                GoTarget._go_os = line.replace('GOOS=', '').strip('"')
            elif line.startswith('GOARCH='):
                GoTarget._go_arch = line.replace('GOARCH=', '').strip('"')
def test_jobs_num(self):
    """Calculate the number of test jobs.

    A user-configured positive global_config.test_jobs always wins.
    Otherwise the number is derived from the build jobs number, capped
    by the local cpu count and halved, with a minimum of 1.
    """
    # User has the highest priority
    jobs_num = config.get_item('global_config', 'test_jobs')
    if jobs_num > 0:
        return jobs_num
    # In distcc enabled mode, the build_jobs_num may be quite large, but we
    # only support running tests locally, so the test_jobs_num should be
    # limited by the local cpu number.
    # We limit the test_jobs_num to be half of the build job number because
    # tests may be heavier than builds (may be not, perhaps).
    build_jobs_num = self.build_jobs_num()
    cpu_core_num = cpu_count()
    # Use floor division: '/' would produce a float on Python 3.
    jobs_num = max(min(build_jobs_num, cpu_core_num) // 2, 1)
    console.info('Adjust test jobs number(-j N) to be %d' % jobs_num)
    return jobs_num
def build(options):
    """Run the style check, then drive a full build with the configured
    native builder and post-build verification.

    Returns 0 on success, otherwise a non-zero code.
    """
    _check_code_style(_TARGETS)
    console.info('building...')
    console.flush()
    use_ninja = config.get_item('global_config', 'native_builder') == 'ninja'
    returncode = _ninja_build(options) if use_ninja else _scons_build(options)
    if returncode != 0:
        console.error('building failure.')
        return returncode
    if not build_manager.instance.verify():
        console.error('building failure.')
        return 1
    console.info('building done.')
    return 0
def verify(self):
    """Verify specific targets after build is complete.

    Newer variant: bails out early when the header inclusion check is
    disabled, and applies to all cc_* target types with sources.
    """
    if not config.get_item('cc_config', 'header_inclusion_dependencies'):
        return True
    verify_history = self._load_verify_history()
    header_inclusion_history = verify_history[
        'header_inclusion_dependencies']
    error = 0
    for k in self.__sorted_targets_keys:
        target = self.__build_targets[k]
        if target.type.startswith('cc_') and target.srcs:
            if not target.verify_header_inclusion_dependencies(
                    header_inclusion_history):
                error += 1
    self._dump_verify_history()
    return error == 0
def generate_scalac_rule(self, java_config):
    """Emit the ninja 'scalac' rule for compiling scala sources.

    Uses scalac from scala_config.scala_home when configured and forces
    the java command via the JAVACMD environment variable.
    """
    scalac = 'scalac'
    scala_home = config.get_item('scala_config', 'scala_home')
    if scala_home:
        scalac = os.path.join(scala_home, 'bin', scalac)
    java = self.get_java_command(java_config, 'java')
    self._add_rule(
        textwrap.dedent('''\
                scalacflags = -nowarn
                '''))
    cmd = [
        'JAVACMD=%s' % java, scalac, '-encoding UTF8', '-d ${out}',
        '-classpath ${classpath}', '${scalacflags}', '${in}'
    ]
    self.generate_rule(name='scalac',
                       command=' '.join(cmd),
                       description='SCALAC ${out}')
def __init__(self, name, type, srcs, deps, visibility, kwargs):
    """Init method.

    Init the target: registers it in the build database, validates its
    name, sources, and dependencies, and initializes visibility and
    bookkeeping structures.
    """
    from blade import build_manager
    self.blade = build_manager.instance
    self.build_dir = self.blade.get_build_dir()
    current_source_path = self.blade.get_current_source_path()
    self.target_database = self.blade.get_target_database()
    # key is (path, name); fullname is 'path:name'.
    self.key = (current_source_path, name)
    self.fullname = '%s:%s' % self.key
    self.name = name
    self.path = current_source_path
    self.source_location = source_location(os.path.join(current_source_path, 'BUILD'))
    self.type = type
    self.srcs = srcs
    self.deps = []
    self.expanded_deps = []
    self.visibility = 'PUBLIC'
    self.data = {}
    self.data['test_timeout'] = config.get_item('global_config', 'test_timeout')
    # Keep track of target files generated by this target. Note that one target rule
    # may correspond to several target files, such as:
    # proto_library: static lib/shared lib/jar variables
    self.data['targets'] = {}
    self.data['default_target'] = ''
    self._check_name()
    self._check_kwargs(kwargs)
    self._check_srcs()
    self._check_deps(deps)
    self._init_target_deps(deps)
    self._init_visibility(visibility)
    self.build_rules = []
    self.data['generated_hdrs'] = []
def _check_srcs(self):
    """Check source files.

    Rejects duplicate and non-relative paths, then detects the same file
    being claimed by multiple targets, acting per the configured
    duplicated_source_action ('error' or 'warning').
    """
    dups = []
    srcset = set()
    for s in self.srcs:
        if s in srcset:
            dups.append(s)
        else:
            srcset.add(s)
    if dups:
        console.error_exit('%s Duplicate source file paths: %s ' % (
            self.fullname, dups))
    # Check if one file belongs to two different targets.
    action = config.get_item('global_config', 'duplicated_source_action')
    for s in self.srcs:
        if '..' in s or s.startswith('/'):
            console.error_exit(
                '%s Invalid source file path: %s. '
                'can only be relative path, and must in current directory '
                'or subdirectories.' % (self.fullname, s))
        src = os.path.normpath(os.path.join(self.path, s))
        # target is (fullname, allows_duplicates).
        target = self.fullname, self._allow_duplicate_source()
        if src not in Target.__src_target_map:
            Target.__src_target_map[src] = target
        else:
            target_existed = Target.__src_target_map[src]
            if target_existed != target:
                # Always preserve the target which disallows
                # duplicate source files in the map
                if target_existed[1]:
                    Target.__src_target_map[src] = target
                elif target[1]:
                    pass
                else:
                    message = 'Source file %s belongs to {%s, %s}' % (
                        s, target_existed[0], target[0])
                    if action == 'error':
                        console.error_exit(message)
                    elif action == 'warning':
                        console.warning(message)
def __extract_dep_jars(self, dkey, dep_jars, maven_jars):
    """Extract jar file built by the target with the specified dkey.

    dep_jars: a list of jars built by blade targets. Each item is either
    a scons var or a file path depending on the build system.

    maven_jars: a list of jars managed by maven repository.
    """
    dep = self.target_database[dkey]
    if config.get_item('global_config', 'native_builder') == 'ninja':
        jar = dep._get_target_file('jar')
    else:
        jar = dep._get_target_var('jar')
    if jar:
        dep_jars.append(jar)
    else:
        # Not built by blade: fall back to the maven-managed binary jar.
        jar = dep.data.get('binary_jar')
        if jar:
            assert dep.type == 'maven_jar'
            maven_jars.append(jar)
def __init__(
        self, name, srcs, deps, visibility, optimize, deprecated, kwargs):
    """Init the thrift_library target.

    Hardcodes implicit deps on the configured thrift runtime libraries,
    links all symbols by default, and precomputes the generated C++
    headers for each .thrift source via a per-file ThriftHelper.
    """
    srcs = var_to_list(srcs)
    super(ThriftLibrary, self).__init__(
        name=name,
        type='thrift_library',
        srcs=srcs,
        src_exts=['thrift'],
        deps=deps,
        visibility=visibility,
        warning='',
        defs=[],
        incs=[],
        export_incs=[],
        optimize=optimize,
        extra_cppflags=[],
        extra_linkflags=[],
        kwargs=kwargs)
    thrift_libs = config.get_item('thrift_config', 'thrift_libs')
    # Hardcode deps rule to thrift libraries.
    self._add_implicit_library(thrift_libs)
    # Link all the symbols by default
    self.attr['link_all_symbols'] = True
    self.attr['deprecated'] = deprecated
    # For each thrift file initialize a ThriftHelper, which will be used
    # to get the source files generated from thrift file.
    # (Removed the unused 'sources' local that was declared alongside
    # 'headers' but never referenced.)
    headers = []
    self.thrift_helpers = {}
    for src in self.srcs:
        self.thrift_helpers[src] = ThriftHelper(self.path, src)
        thrift_files = self._thrift_gen_cpp_files(src)
        headers += [h for h in thrift_files if h.endswith('.h')]
    self.attr['generated_hdrs'] = headers
def run(self): """Run all the test target programs. """ self._collect_test_jobs() tests_run_list = [] for target_key in self.test_jobs: target = self.target_database[target_key] test_env = self._prepare_env(target) cmd = [os.path.abspath(self._executable(target))] cmd += self.options.args if console.color_enabled(): test_env['GTEST_COLOR'] = 'yes' else: test_env['GTEST_COLOR'] = 'no' test_env['GTEST_OUTPUT'] = 'xml' test_env['HEAPCHECK'] = target.data.get('heap_check', '') pprof_path = config.get_item('cc_test_config', 'pprof_path') if pprof_path: test_env['PPROF_PATH'] = os.path.abspath(pprof_path) if self.options.coverage: test_env['BLADE_COVERAGE'] = 'true' tests_run_list.append( (target, self._runfiles_dir(target), test_env, cmd)) console.notice('%d tests to run' % len(tests_run_list)) console.flush() scheduler = TestScheduler(tests_run_list, self.options.test_jobs) try: scheduler.schedule_jobs() except KeyboardInterrupt: console.clear_progress_bar() console.error('KeyboardInterrupt, all tests stopped') console.flush() if self.options.coverage: self._generate_coverage_report() self._clean_env() passed_run_results, failed_run_results = scheduler.get_results() self._save_test_history(passed_run_results, failed_run_results) self._show_tests_result(passed_run_results, failed_run_results) return 0 if len(passed_run_results) == len(self.test_jobs) else 1
def run(self):
    """Run all the test target programs.

    Variant of the test runner; differs from the sibling version only in
    flushing sys.stdout directly before scheduling.
    """
    self._collect_test_jobs()
    tests_run_list = []
    for target_key in self.test_jobs:
        target = self.target_database[target_key]
        test_env = self._prepare_env(target)
        cmd = [os.path.abspath(self._executable(target))]
        cmd += self.options.args
        if console.color_enabled():
            test_env['GTEST_COLOR'] = 'yes'
        else:
            test_env['GTEST_COLOR'] = 'no'
        test_env['GTEST_OUTPUT'] = 'xml'
        test_env['HEAPCHECK'] = target.data.get('heap_check', '')
        pprof_path = config.get_item('cc_test_config', 'pprof_path')
        if pprof_path:
            test_env['PPROF_PATH'] = os.path.abspath(pprof_path)
        if self.options.coverage:
            test_env['BLADE_COVERAGE'] = 'true'
        tests_run_list.append((target, self._runfiles_dir(target), test_env, cmd))
    console.notice('%d tests to run' % len(tests_run_list))
    # NOTE(review): uses sys.stdout.flush() here while the sibling
    # implementation uses console.flush() — confirm which is intended.
    sys.stdout.flush()
    scheduler = TestScheduler(tests_run_list, self.options.test_jobs)
    try:
        scheduler.schedule_jobs()
    except KeyboardInterrupt:
        console.clear_progress_bar()
        console.error('KeyboardInterrupt, all tests stopped')
        console.flush()
    if self.options.coverage:
        self._generate_coverage_report()
    self._clean_env()
    passed_run_results, failed_run_results = scheduler.get_results()
    self._save_test_history(passed_run_results, failed_run_results)
    self._show_tests_result(passed_run_results, failed_run_results)
    return 0 if len(passed_run_results) == len(self.test_jobs) else 1
def __init__(self, name, srcs, deps, optimize, deprecated, blade, kwargs):
    """Init method.

    Init the thrift target. Hardcodes deps on the configured thrift
    runtime libraries and creates a ThriftHelper per source file.
    """
    srcs = var_to_list(srcs)
    self._check_thrift_srcs_name(srcs)
    CcTarget.__init__(self, name, 'thrift_library', srcs, deps, None, '',
                      [], [], [], optimize, [], [], blade, kwargs)
    self.data['python_vars'] = []
    self.data['python_sources'] = []
    thrift_libs = config.get_item('thrift_config', 'thrift_libs')
    # Hardcode deps rule to thrift libraries.
    self._add_hardcode_library(thrift_libs)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    self.data['deprecated'] = deprecated
    self.data['java_sources_explict_dependency'] = []
    # For each thrift file initialize a ThriftHelper, which will be used
    # to get the source files generated from thrift file.
    self.thrift_helpers = {}
    for src in srcs:
        self.thrift_helpers[src] = ThriftHelper(self.path, src)
def _check_srcs(self):
    """Check source files.

    Newer variant using self.error/self.warning. Rejects duplicate and
    non-relative paths, and detects the same file claimed by multiple
    targets per the configured duplicated_source_action.
    """
    dups = []
    srcset = set()
    for s in self.srcs:
        if s in srcset:
            dups.append(s)
        else:
            srcset.add(s)
    if dups:
        self.error('Duplicate source file paths: %s ' % dups)
    # Check if one file belongs to two different targets.
    action = config.get_item('global_config', 'duplicated_source_action')
    for src in self.srcs:
        if '..' in src or src.startswith('/'):
            self.error(
                'Invalid source file path: %s. can only be relative path, and must '
                'in current directory or subdirectories.' % src)
        full_src = os.path.normpath(os.path.join(self.path, src))
        # target is (fullname, allows_duplicates).
        target = self.fullname, self._allow_duplicate_source()
        if full_src not in Target.__src_target_map:
            Target.__src_target_map[full_src] = target
        else:
            target_existed = Target.__src_target_map[full_src]
            if target_existed != target:
                # Always preserve the target which disallows
                # duplicate source files in the map
                if target_existed[1]:
                    Target.__src_target_map[full_src] = target
                elif target[1]:
                    pass
                else:
                    message = '"%s" is already in srcs of "%s"' % (
                        src, target_existed[0])
                    if action == 'error':
                        self.error(message)
                    elif action == 'warning':
                        self.warning(message)
def parallel_jobs_num(self): """Tune the jobs num. """ # User has the highest priority user_jobs_num = self.__options.jobs if user_jobs_num > 0: return user_jobs_num # Calculate job numbers smartly distcc_enabled = config.get_item('distcc_config', 'enabled') if distcc_enabled and self.build_environment.distcc_env_prepared: # Distcc doesn't cost much local cpu, jobs can be quite large. distcc_num = len(self.build_environment.get_distcc_hosts_list()) jobs_num = min(max(int(1.5 * distcc_num), 1), 20) else: cpu_core_num = cpu_count() # machines with cpu_core_num > 4 is usually shared by multiple users, # set an upper bound to avoid interfering other users jobs_num = min(2 * cpu_core_num, 8) console.info('tunes the parallel jobs number(-j N) to be %d' % jobs_num) return jobs_num
def verify(self):
    """Verify specific targets after build is complete.

    Latest variant: checks header-dependency-missing problems for all
    cc_* targets, honoring per-target suppressions from cc_config, and
    dumps per-target details for reporting.
    """
    verify_history = self._load_verify_history()
    header_inclusion_history = verify_history['header_inclusion_dependencies']
    error = 0
    verify_details = {}
    verify_suppress = config.get_item('cc_config', 'hdr_dep_missing_suppress')
    # Sorting helps reduce jumps between BUILD files when fixng reported problems
    for k in sorted(self.__expanded_command_targets):
        target = self.__build_targets[k]
        if target.type.startswith('cc_') and target.srcs:
            ok, details = target.verify_hdr_dep_missing(
                header_inclusion_history,
                verify_suppress.get(target.key, {}))
            if not ok:
                error += 1
            if details:
                verify_details[target.key] = details
    self._dump_verify_details(verify_details)
    self._dump_verify_history()
    return error == 0
def _setup_env_java(self):
    """Create the scons java construction environment ('env_java').

    Applies JAVAVERSION plus -source/-target flags from java_config, and
    the jacoco agent path when jacoco_home is configured.
    """
    env_java = 'env_java'
    self._add_rule('%s = top_env.Clone()' % env_java)
    java_config = config.get_section('java_config')
    version = java_config['version']
    source_version = java_config.get('source_version', version)
    target_version = java_config.get('target_version', version)
    # JAVAVERSION must be set because scons need it to deduce class names
    # from java source, and the default value '1.5' is too low.
    java_version = version or '1.6'
    self._add_rule('%s.Replace(JAVAVERSION="%s")' % (env_java, java_version))
    if source_version:
        self._add_rule('%s.Append(JAVACFLAGS="-source %s")' % (
            env_java, source_version))
    if target_version:
        self._add_rule('%s.Append(JAVACFLAGS="-target %s")' % (
            env_java, target_version))
    jacoco_home = config.get_item('java_test_config', 'jacoco_home')
    if jacoco_home:
        jacoco_agent = os.path.join(jacoco_home, 'lib', 'jacocoagent.jar')
        self._add_rule('%s.Replace(JACOCOAGENT="%s")' % (env_java, jacoco_agent))
def __init__(self, name, target_type, srcs, deps, visibility, blade, kwargs):
    """Init method.

    Init the target (older blade-parameter variant): registers it in the
    build database, validates name/srcs/deps, and initializes visibility
    and bookkeeping structures.
    """
    self.blade = blade
    self.build_path = self.blade.get_build_path()
    current_source_path = self.blade.get_current_source_path()
    self.target_database = self.blade.get_target_database()
    # key is (path, name); fullname is 'path:name'.
    self.key = (current_source_path, name)
    self.fullname = '%s:%s' % self.key
    self.name = name
    self.path = current_source_path
    self.type = target_type
    self.srcs = srcs
    self.deps = []
    self.expanded_deps = []
    self.visibility = 'PUBLIC'
    self.env_name = None
    self.data = {}
    self.data['test_timeout'] = config.get_item('global_config', 'test_timeout')
    # Keep track of scons variables defined by scons rules
    # generated by this target. Note that one blade target
    # may correspond to several scons variables:
    # proto_library: static lib/shared lib/jar variables
    self.data['targets'] = {}
    self.data['default_target'] = ''
    self._check_name()
    self._check_kwargs(kwargs)
    self._check_srcs()
    self._check_deps(deps)
    self._init_target_deps(deps)
    self._init_visibility(visibility)
    self.build_rules = []
    self.data['generated_hdrs'] = []
def __init__(
        self, name, srcs, deps, visibility, optimize, deprecated, kwargs):
    """Init the thrift_library target (keyword super-call variant).

    Hardcodes deps on the configured thrift runtime libraries, links all
    symbols by default, and creates a ThriftHelper per source file.
    """
    srcs = var_to_list(srcs)
    self._check_thrift_srcs_name(srcs)
    super(ThriftLibrary, self).__init__(
        name=name,
        type='thrift_library',
        srcs=srcs,
        deps=deps,
        visibility=visibility,
        warning='',
        hdr_dep_missing_severity=None,
        defs=[],
        incs=[],
        export_incs=[],
        optimize=optimize,
        extra_cppflags=[],
        extra_linkflags=[],
        kwargs=kwargs)
    thrift_libs = config.get_item('thrift_config', 'thrift_libs')
    # Hardcode deps rule to thrift libraries.
    self._add_hardcode_library(thrift_libs)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    self.data['deprecated'] = deprecated
    # For each thrift file initialize a ThriftHelper, which will be used
    # to get the source files generated from thrift file.
    self.thrift_helpers = {}
    for src in srcs:
        self.thrift_helpers[src] = ThriftHelper(self.path, src)
def __init__(self, name, srcs, deps, optimize, deprecated, blade, kwargs):
    """Init the thrift_library target (positional CcTarget-call variant).

    Hardcodes deps on the configured thrift runtime libraries, links all
    symbols by default, and creates a ThriftHelper per source file.
    """
    srcs = var_to_list(srcs)
    self._check_thrift_srcs_name(srcs)
    CcTarget.__init__(self, name, 'thrift_library', srcs, deps, None, '',
                      [], [], [], optimize, [], [], blade, kwargs)
    self.data['python_vars'] = []
    self.data['python_sources'] = []
    thrift_libs = config.get_item('thrift_config', 'thrift_libs')
    # Hardcode deps rule to thrift libraries.
    self._add_hardcode_library(thrift_libs)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    self.data['deprecated'] = deprecated
    self.data['java_sources_explict_dependency'] = []
    # For each thrift file initialize a ThriftHelper, which will be used
    # to get the source files generated from thrift file.
    self.thrift_helpers = {}
    for src in srcs:
        self.thrift_helpers[src] = ThriftHelper(self.path, src)
def _check_code_style(targets):
    """Run cpplint over the source files changed by the given targets.

    Returns 0 when the check is disabled, nothing changed, or the check
    completed (style problems only produce a warning); returns 1 when
    interrupted by the user.
    """
    cpplint = config.get_item('cc_config', 'cpplint')
    if not cpplint:
        console.info('cpplint disabled')
        return 0
    changed_files = _get_changed_files(targets, _BLADE_ROOT_DIR, _WORKING_DIR)
    if not changed_files:
        return 0
    console.info('Begin to check code style for changed source code')
    p = subprocess.Popen(('%s %s' % (cpplint, ' '.join(changed_files))), shell=True)
    try:
        p.wait()
        if p.returncode != 0:
            if p.returncode == 127:
                msg = ("Can't execute '{0}' to check style, you can config the "
                       "'cpplint' option to be a valid cpplint path in the "
                       "'cc_config' section of blade.conf or BLADE_ROOT, or "
                       "make sure '{0}' command is correct.").format(cpplint)
            else:
                msg = 'Please fixing style warnings before submitting the code!'
            console.warning(msg)
    # Fixed: 'except KeyboardInterrupt, e' is Python-2-only syntax;
    # 'as e' is valid on both Python 2.6+ and Python 3.
    except KeyboardInterrupt as e:
        console.error(str(e))
        return 1
def ninja_protoc_direct_dependencies(self, vars):
    """Populate vars['protocflags'] with '--direct_dependencies' when the
    option is enabled in proto_library_config; otherwise leave vars
    untouched.
    """
    if not config.get_item('proto_library_config', 'protoc_direct_dependencies'):
        return
    deps = self.protoc_direct_dependencies()
    deps += config.get_item('proto_library_config', 'well_known_protos')
    vars['protocflags'] = '--direct_dependencies %s' % ':'.join(deps)
def _generate_scala_target_platform(self):
    """Emit a SCALACFLAGS '-target:' rule when scala_config defines a
    target_platform; otherwise emit nothing.
    """
    platform = config.get_item('scala_config', 'target_platform')
    if not platform:
        return
    self._write_rule('%s.Append(SCALACFLAGS=["-target:%s"])' % (
        self._env_name(), platform))
def new_build_rules_generator(self):
    """Create the rules generator matching the configured native builder:
    ninja -> NinjaRulesGenerator, anything else -> SconsRulesGenerator.
    """
    builder = config.get_item('global_config', 'native_builder')
    if builder == 'ninja':
        return NinjaRulesGenerator('build.ninja', self.__blade_path, self)
    return SconsRulesGenerator('SConstruct', self.__blade_path, self)