def gen_targets_rules(self):
    """Get the build rules and return to the object who queries this.

    Iterates registered targets in sorted order, skips non-buildable and
    filtered-out targets, and collects the rules each blade object emits.

    Returns:
        list of str, the accumulated rule text fragments.
    """
    rules_buf = []
    # Command-line driven filters: tests/packages are skipped unless they
    # were named directly on the command line.
    skip_test = getattr(self.__options, 'no_test', False)
    skip_package = not getattr(self.__options, 'generate_package', False)
    native_builder = config.get_item('global_config', 'native_builder')
    for k in self.__sorted_targets_keys:
        target = self.__build_targets[k]
        if not self._is_scons_object_type(target.type):
            continue
        blade_object = self.__target_database.get(k, None)
        if not blade_object:
            console.warning('not registered blade object, key %s' % str(k))
            continue
        if (skip_test and target.type.endswith('_test') and
                k not in self.__direct_targets):
            continue
        if (skip_package and target.type == 'package' and
                k not in self.__direct_targets):
            continue
        # Dispatch to the configured backend builder's rule generator.
        if native_builder == 'ninja':
            blade_object.ninja_rules()
        else:
            blade_object.scons_rules()
        rules = blade_object.get_rules()
        if rules:
            # Blank line between targets for readability of the output file.
            rules_buf.append('\n')
            rules_buf += rules
    return rules_buf
def _check_code_style(targets):
    """Run cpplint on source files changed relative to the workspace.

    Args:
        targets: the targets whose changed source files should be checked.

    Returns:
        int, 0 when the check was skipped or completed; 1 when interrupted.
    """
    cpplint = config.get_item('cc_config', 'cpplint')
    if not cpplint:
        console.info('cpplint disabled')
        return 0
    changed_files = _get_changed_files(targets, _BLADE_ROOT_DIR, _WORKING_DIR)
    if not changed_files:
        return 0
    console.info('Begin to check code style for changed source code')
    p = subprocess.Popen(('%s %s' % (cpplint, ' '.join(changed_files))), shell=True)
    try:
        p.wait()
        if p.returncode != 0:
            if p.returncode == 127:
                # 127 means the shell could not find the cpplint command.
                msg = ("Can't execute '{0}' to check style, you can config the "
                       "'cpplint' option to be a valid cpplint path in the "
                       "'cc_config' section of blade.conf or BLADE_ROOT, or "
                       "make sure '{0}' command is correct.").format(cpplint)
            else:
                # Fix: grammatical error in the original user-facing message
                # ("Please fixing ...").
                msg = 'Please fix style warnings before submitting the code!'
            console.warning(msg)
    # Fix: use the `as` form instead of the Python-2-only comma form so the
    # code parses on both Python 2.6+ and Python 3.
    except KeyboardInterrupt as e:
        console.error(str(e))
        return 1
def _generate_java_coverage_report(self):
    """Generate the java coverage report with the jacoco-based reporter.

    Requires `jacoco_home` and `coverage_reporter` in the java_test_config
    section; silently aborts (with a warning) if either is missing.
    """
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    coverage_reporter = java_test_config['coverage_reporter']
    if not jacoco_home or not coverage_reporter:
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if coverage_data:
        # Prefer the JDK configured via java_config.java_home; fall back
        # to whatever `java` resolves to on PATH.
        java = 'java'
        java_home = config.get_item('java_config', 'java_home')
        if java_home:
            java = os.path.join(java_home, 'bin', 'java')
        cmd = ['%s -classpath %s:%s com.tencent.gdt.blade.ReportGenerator' % (
            java, coverage_reporter, jacoco_libs)]
        cmd.append(report_dir)
        # Each coverage entry is joined with ',' as one command-line argument.
        for data in coverage_data:
            cmd.append(','.join(data))
        cmd_str = ' '.join(cmd)
        console.info('Generating java coverage report')
        console.debug(cmd_str)
        if subprocess.call(cmd_str, shell=True) != 0:
            console.warning('Failed to generate java coverage report')
def ninja_rules(self):
    """Generate ninja build rules for this package target.

    Collects (input, archive entry) pairs from both regular sources and
    location references, then emits either a plain 'package' build or a
    shell-based packaging step.
    """
    inputs, entries = [], []
    for src, dst in self.data['sources']:
        inputs.append(src)
        entries.append(dst)
    targets = self.blade.get_build_targets()
    # Location references point at output files of other targets.
    for key, type, dst in self.data['locations']:
        path = targets[key]._get_target_file(type)
        if not path:
            console.warning('%s: Location %s %s is missing. Ignored.' %
                            (self.fullname, key, type))
            continue
        # Default the archive entry name to the referenced file's basename.
        if not dst:
            dst = os.path.basename(path)
        inputs.append(path)
        entries.append(dst)
    output = self._target_file_path(self.data['out'])
    if not self.data['shell']:
        self.ninja_build(output, 'package', inputs=inputs,
                         variables={'entries': ' '.join(entries)})
    else:
        self.ninja_package_in_shell(output, inputs, entries)
def ninja_generate_resources(self):
    """Generate the build for java resources of this target.

    Returns:
        list of str, output paths under the '<target>.resources' directory,
        or an empty list when there are no resources.
    """
    resources = self.data['resources']
    locations = self.data['location_resources']
    if not resources and not locations:
        return []
    inputs, outputs = [], []
    resources_dir = self._target_file_path() + '.resources'
    resources = self._process_regular_resources(resources)
    for src, dst in resources:
        inputs.append(src)
        outputs.append(os.path.join(resources_dir, dst))
    targets = self.blade.get_build_targets()
    # Location resources reference output files of other targets.
    for key, type, dst in locations:
        path = targets[key]._get_target_file(type)
        if not path:
            console.warning('%s: Location %s %s is missing. Ignored.' %
                            (self.fullname, key, type))
            continue
        # Default resource name to the referenced file's basename.
        if not dst:
            dst = os.path.basename(path)
        inputs.append(path)
        outputs.append(os.path.join(resources_dir, dst))
    if inputs:
        self.ninja_build(outputs, 'javaresource', inputs=inputs)
    return outputs
def __init__(self, name, srcs, deps, visibility, tags, resources, source_encoding,
             warnings, exclusions, testdata, kwargs):
    """Init the scala_test target.

    Delegates most attributes to the parent class, then marks the target
    as a test and wires in the configured scalatest libraries.
    """
    super(ScalaTest, self).__init__(
        name=name,
        srcs=srcs,
        deps=deps,
        resources=resources,
        visibility=visibility,
        tags=tags,
        source_encoding=source_encoding,
        warnings=warnings,
        exclusions=exclusions,
        kwargs=kwargs)
    self.type = 'scala_test'
    self.attr['testdata'] = var_to_list(testdata)
    self._add_tags('type:test')
    if not self.srcs:
        self.warning('Empty scala test sources.')
    # scalatest jars are an implicit dependency of every scala_test.
    scalatest_libs = config.get_item('scala_test_config', 'scalatest_libs')
    if scalatest_libs:
        self._add_implicit_library(scalatest_libs)
    else:
        console.warning('Config: "scala_test_config.scalatest_libs" is not configured')
def _download_dependency(self, id, classifier, target):
    """Resolve and download the runtime dependencies of a maven artifact.

    Args:
        id: str, the maven id 'group:artifact:version'.
        classifier: str, optional artifact classifier (currently unused,
            see the retained commented-out handling below).
        target: the blade target used in diagnostics.

    Returns:
        bool, True on success or when nothing needed downloading.
    """
    group, artifact, version = id.split(':')
    target_path = self._generate_jar_path(id)
    classpath = 'classpath.txt'
    log = 'classpath.log'
    log = os.path.join(target_path, log)
    if not self._need_download(os.path.join(target_path, classpath), version, log):
        return True
    # if classifier:
    #     id = '%s:%s' % (id, classifier)
    #     # Currently analyzing dependencies of classifier jar
    #     # usually fails. Here when there is no classpath.txt
    #     # file but classpath.log exists, that means the failure
    #     # of analyzing dependencies last time
    #     if (not os.path.exists(os.path.join(target_path, classpath))
    #             and os.path.exists(log)):
    #         return False
    console.info('Downloading %s dependencies...' % id)
    pom = os.path.join(target_path, artifact + '-' + version + '.pom')
    # mdep.outputFile receives the resolved runtime classpath.
    cmd = ' '.join([self.__maven, 'dependency:build-classpath',
                    '-DincludeScope=runtime',
                    '-Dmdep.outputFile=%s' % classpath])
    # -e -X: verbose maven output, captured in the log file for diagnosis.
    cmd += ' -e -X -f %s > %s' % (pom, log)
    if subprocess.call(cmd, shell=True) != 0:
        console.warning('//%s: Error occurred when resolving %s dependencies. '
                        'Check %s for details.' % (target, id, log))
        return False
    return True
def gen_targets_rules(self):
    """Get the build rules and return to the object who queries this.

    Returns:
        list of str, one 'include <target ninja file>\\n' line per target.
    """
    rules_buf = []
    # Command-line driven filters: tests/packages are skipped unless they
    # were named directly on the command line.
    skip_test = getattr(self.__options, 'no_test', False)
    skip_package = not getattr(self.__options, 'generate_package', False)
    for k in self.__sorted_targets_keys:
        target = self.__build_targets[k]
        if not self._is_real_target_type(target.type):
            continue
        target = self.__target_database.get(k, None)
        if not target:
            console.warning('"%s" is not a registered blade object' % str(k))
            continue
        if skip_test and target.type.endswith(
                '_test') and k not in self.__direct_targets:
            continue
        if skip_package and target.type == 'package' and k not in self.__direct_targets:
            continue
        target.before_generate()
        target_ninja = self._find_or_generate_target_ninja_file(target)
        if target_ninja:
            target._remove_on_clean(target_ninja)
            # Bug fix: `rules_buf += <str>` extends the list character by
            # character; append the include line as a single element.
            rules_buf.append('include %s\n' % target_ninja)
    return rules_buf
def _generate_java_coverage_report(self):
    """Generate the java coverage report with the jacoco-based reporter.

    Requires `jacoco_home` and `coverage_reporter` in the java_test_config
    section; aborts with a warning if either is missing.

    NOTE(review): this variant invokes plain `java` from PATH and does not
    honor java_config.java_home — confirm that is intended.
    """
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    coverage_reporter = java_test_config['coverage_reporter']
    if not jacoco_home or not coverage_reporter:
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if coverage_data:
        cmd = ['java -classpath %s:%s com.tencent.gdt.blade.ReportGenerator' % (
            coverage_reporter, jacoco_libs)]
        cmd.append(report_dir)
        # Each coverage entry is joined with ',' as one command-line argument.
        for data in coverage_data:
            cmd.append(','.join(data))
        cmd_str = ' '.join(cmd)
        console.info('Generating java coverage report')
        console.info(cmd_str)
        # Non-zero exit status (truthy) means report generation failed.
        if subprocess.call(cmd_str, shell=True):
            console.warning('Failed to generate java coverage report')
def _show_slow_tests(self, passed_run_results, failed_run_results):
    """Warn about tests whose run time was collected as 'slow'."""
    slow_tests = self._collect_slow_tests(passed_run_results)
    slow_tests += self._collect_slow_tests(failed_run_results)
    if not slow_tests:
        return
    console.warning('%d slow tests:' % len(slow_tests))
    # Sorted by cost time ascending; key is (path, name).
    for cost_time, key in sorted(slow_tests):
        console.warning('%.4gs\t//%s:%s' % (cost_time, key[0], key[1]), prefix=False)
def _expand_target_patterns(blade, target_ids): """Expand target patterns from command line.""" # Parse command line target_ids. For those in the form of <path>:<target>, # record (<path>,<target>) in direct_targets; for the rest (with <path> # but without <target>), record <path> into starting_dirs. direct_targets = set() starting_dirs = set() for target_id in target_ids: source_dir, target_name = target_id.rsplit(':', 1) if not os.path.exists(source_dir): _report_not_exist('Directory', source_dir, source_dir, blade) if _is_under_skipped_dir(source_dir): console.warning('"%s" is under skipped directory, ignored' % target_id) continue if target_name == '...': for root, dirs, files in os.walk(source_dir): # Note the dirs[:] = slice assignment; we are replacing the # elements in dirs (and not the list referred to by dirs) so # that os.walk() will not process deleted directories. dirs[:] = [d for d in dirs if not _is_load_excluded(root, d)] if 'BUILD' in files: starting_dirs.add(root) elif target_name == '*': starting_dirs.add(source_dir) else: direct_targets.add(target_id) return direct_targets, starting_dirs
def gen_targets_rules(self):
    """Get the build rules and return to the object who queries this.

    Ninja-only variant: generates rules for each registered target in
    sorted order and concatenates the rule fragments.

    Returns:
        list of str, the accumulated rule text fragments.
    """
    rules_buf = []
    # Command-line driven filters: tests/packages are skipped unless they
    # were named directly on the command line.
    skip_test = getattr(self.__options, 'no_test', False)
    skip_package = not getattr(self.__options, 'generate_package', False)
    for k in self.__sorted_targets_keys:
        target = self.__build_targets[k]
        if not self._is_real_target_type(target.type):
            continue
        blade_object = self.__target_database.get(k, None)
        if not blade_object:
            console.warning('"%s" is not a registered blade object' % str(k))
            continue
        if skip_test and target.type.endswith(
                '_test') and k not in self.__direct_targets:
            continue
        if skip_package and target.type == 'package' and k not in self.__direct_targets:
            continue
        blade_object.ninja_rules()
        rules = blade_object.get_rules()
        if rules:
            rules_buf += rules
    return rules_buf
def _prepare_shared_libraries(self, target, runfiles_dir): """Prepare correct shared libraries for running target""" # Make symbolic links for shared libraries of the executable. # For normal built shared libraries, their path has been writen in the executable. # For example, `build64_release/common/crypto/hash/libhash.so`, we need put a symbolic # link `build64_release` to the it's full path. build_dir_name = os.path.basename(self.build_dir) os.symlink(os.path.abspath(self.build_dir), os.path.join(runfiles_dir, build_dir_name)) # For shared libraries with `soname`, their path were not been writen into the executable, # they are always been searched from some given paths. # # libcrypto.so.1.0.0 => /lib64/libcrypto.so.1.0.0 (0x00007f0705d9f000) for soname, full_path in self._get_shared_libraries_with_soname( target): src = os.path.abspath(full_path) dst = os.path.join(runfiles_dir, soname) if os.path.lexists(dst): console.warning( 'Trying to make duplicate symlink for shared library:\n' '%s -> %s\n' '%s -> %s already exists\n' 'skipped, should check duplicate prebuilt ' 'libraries' % (dst, src, dst, os.path.realpath(dst))) continue os.symlink(src, dst)
def _check_test_options(self):
    """check that test command options. """
    testargs = self.options.testargs
    if not testargs:
        return
    # Deprecated flag: fold its value into the positional args instead.
    console.warning('--testargs has been deprecated, please put all test'
                    ' arguments after a "--" ')
    self.options.args = shlex.split(testargs) + self.options.args
def _check_run_targets(self):
    """check that run command should have only one target. """
    targets = self.targets
    # A runnable target must be fully specified as path:name.
    if not targets or ':' not in targets[0]:
        console.error_exit('Please specify a single target to run: '
                           'blade run //target_path:target_name (or '
                           'a_path:target_name)')
    if len(targets) > 1:
        console.warning('run command will only take one target to build and run')
def glob(include, exclude=None, excludes=None, allow_empty=False):
    """This function can be called in BUILD to specify a set of files using patterns.

    Args:
        include: List[str], file patterns to be matched.
        exclude: Optional[List[str]], file patterns to be removed from the result.
        excludes: Optional[List[str]], deprecated alias of `exclude`.
        allow_empty: bool, whether an empty result is acceptable (otherwise
            a warning is emitted).

    Patterns may contain shell-like wildcards, such as * , ? , or [charset].
    Additionally, the path element '**' matches any subpath.

    Returns:
        List[str], sorted, deduplicated relative paths of matched files.
    """
    from blade import build_manager
    source_dir = Path(build_manager.instance.get_current_source_path())
    source_loc = source_location(os.path.join(str(source_dir), 'BUILD'))
    include = var_to_list(include)
    if excludes:
        console.warning('%s: "glob.excludes" is deprecated, use "exclude" instead' % source_loc,
                        prefix=False)
    exclude = var_to_list(exclude) + var_to_list(excludes)

    def includes_iterator():
        # Collect regular (non-hidden) files matching any include pattern.
        results = []
        for pattern in include:
            for path in source_dir.glob(pattern):
                if path.is_file() and not path.name.startswith('.'):
                    results.append(path.relative_to(source_dir))
        return results

    def is_special(pattern):
        # True when the pattern contains glob metacharacters.
        return '*' in pattern or '?' in pattern or '[' in pattern

    # Split excludes into literal paths (set lookup) and wildcard patterns
    # (per-path matching).
    non_special_excludes = set()
    match_excludes = set()
    for pattern in exclude:
        if is_special(pattern):
            match_excludes.add(pattern)
        else:
            non_special_excludes.add(pattern)

    def exclusion(path):
        if str(path) in non_special_excludes:
            return True
        for pattern in match_excludes:
            ret = path.match(pattern)
            if ret:
                return True
        return False

    result = sorted(set([str(p) for p in includes_iterator() if not exclusion(p)]))
    if not result and not allow_empty:
        args = repr(include)
        if exclude:
            args += ', exclude=%s' % repr(exclude)
        console.warning('%s: "glob(%s)" got an empty result. If it is the expected behavior, '
                        'specify "allow_empty=True" to eliminate this message' % (source_loc, args),
                        prefix=False)
    return result
def cc_config(append=None, **kwargs):
    """extra cc config, like extra cpp include path splited by space.

    Accepts the legacy space-separated string form of `extra_incs` and
    converts it to a list with a deprecation warning.
    """
    if 'extra_incs' in kwargs:
        extra_incs = kwargs['extra_incs']
        # Legacy form: a single space-separated string instead of a list.
        # (`basestring` — this code targets Python 2.)
        if isinstance(extra_incs, basestring) and ' ' in extra_incs:
            console.warning('%s: cc_config: extra_incs has been changed to list' %
                            _blade_config.current_file_name)
            kwargs['extra_incs'] = extra_incs.split()
    _blade_config.update_config('cc_config', append, kwargs)
def ninja_rules(self):
    """Generate ninja build rules for this scala test target."""
    if not self.srcs:
        console.warning('%s: Empty scala test sources.' % self.fullname)
        return
    jar = self.ninja_generate_jar()
    dep_jars, maven_jars = self._get_test_deps()
    # The test binary depends on the built jar plus all dependency jars.
    test_inputs = [jar]
    test_inputs.extend(dep_jars)
    test_inputs.extend(maven_jars)
    self.ninja_build(self._target_file_path(), 'scalatest', inputs=test_inputs)
def _load_verify_history(self):
    """Load verify history from disk, tolerating a missing or corrupt file."""
    if not os.path.exists(self._verify_history_path):
        return self._verify_history
    with open(self._verify_history_path) as f:
        try:
            self._verify_history = json.load(f)
        except Exception as e:  # pylint: disable=broad-except
            # Keep going with the previous in-memory history on any parse error.
            console.warning('Error loading %s, ignored. Reason: %s' % (
                self._verify_history_path, str(e)))
    return self._verify_history
def _check_java_debug_options():
    """Warn when javac debug options cannot produce java line coverage."""
    from blade import java_targets  # pylint: disable=import-outside-toplevel
    # "-g:line" is required to generate line coverage.
    if any('line' in option for option in java_targets.debug_info_options()):
        return
    console.warning(
        '"global_config.debug_info_level" is too low to generate java line coverage'
    )
def _load_verify_history(self):
    """Load verify history from disk, tolerating a missing or corrupt file.

    Returns:
        dict, the (possibly unchanged) in-memory verify history.
    """
    if os.path.exists(self._verify_history_path):
        with open(self._verify_history_path) as f:
            try:
                self._verify_history = json.load(f)
            # Broad catch is deliberate: any parse/IO failure falls back to
            # the previous in-memory history instead of aborting the build.
            except Exception as e:  # pylint: disable=broad-except
                console.warning('Error loading %s, ignore. Reason: %s' %
                                (self._verify_history_path, str(e)))
                # Fix: removed the redundant `pass` that followed the warning.
    return self._verify_history
def __init__(self, name, srcs, deps, resources, source_encoding, warnings,
             testdata, kwargs):
    """Init the scala_test target (legacy variant).

    Builds on ScalaFatLibrary and adds test data plus the scalatest jars.
    """
    ScalaFatLibrary.__init__(self, name, srcs, deps, resources, source_encoding,
                             warnings, [], kwargs)
    self.type = 'scala_test'
    self.data['testdata'] = var_to_list(testdata)
    # scalatest jars are an implicit dependency of every scala_test.
    scalatest_libs = config.get_item('scala_test_config', 'scalatest_libs')
    if scalatest_libs:
        self._add_hardcode_java_library(scalatest_libs)
    else:
        console.warning('scalatest jar was not configured')
def _classes_conflict(checked_classes, classes_path, classes):
    """Check whether classes in classes_path already exist in checked_classes,
    and then ignore this path.

    jacoco will raise an exception if it meets different classes with the same
    name when reporting.

    Args:
        checked_classes: dict mapping class name -> classes_path already seen.
        classes_path: str, the path whose classes are being checked.
        classes: iterable of class names found under classes_path.

    Returns:
        bool, True when a conflict was found (caller should skip this path).
        Side effect: records non-conflicting classes into checked_classes.
    """
    for cls in classes:
        if cls in checked_classes:
            console.warning('Conflict: %s/%s already existed in %s' % (
                classes_path, cls, checked_classes[cls]))
            return True
        checked_classes[cls] = classes_path
    return False
def _load_test_history(self):
    """Load incremental test history from the history file, if present.

    On any load error the existing in-memory history is kept and a full
    test run is implied. Ensures the 'items' key always exists.
    """
    if os.path.exists(_TEST_HISTORY_FILE):
        try:
            with open(_TEST_HISTORY_FILE) as f:
                # History is stored as a python literal/expression.
                # pylint: disable=eval-used
                self.test_history = eval(f.read())
        except (IOError, SyntaxError, NameError, TypeError):
            console.warning('error loading incremental test history, will run full test')
    # assumes self.test_history was initialized (e.g. to {}) before this
    # call — TODO confirm against the constructor.
    if 'items' not in self.test_history:
        self.test_history['items'] = {}
def _check_run_targets(self):
    """check that run command should have only one target. """
    targets = self.targets
    # A runnable target must be fully specified as path:name.
    if not targets or ':' not in targets[0]:
        console.error_exit('Please specify a single target to run: '
                           'blade run //target_path:target_name (or '
                           'a_path:target_name)')
    if len(targets) > 1:
        console.warning('run command will only take one target to build and run')
    runargs = self.options.runargs
    if runargs:
        # Deprecated flag: fold its value into the positional args instead.
        console.warning('--runargs has been deprecated, please put all run'
                        ' arguments after a "--"')
        self.options.args = shlex.split(runargs) + self.options.args
def _load_test_history(self):
    """Load incremental test history from the history file, if present.

    On any parse error the existing in-memory history is kept (full test
    run implied). Ensures the 'items' key always exists.
    """
    if os.path.exists(self.test_history_file):
        with open(self.test_history_file) as f:
            try:
                # History is stored as a python literal/expression.
                # pylint: disable=eval-used
                self.test_history = eval(f.read())
            except (SyntaxError, NameError, TypeError) as e:
                console.debug('Exception when loading test history: %s' % e)
                console.warning('Error loading incremental test history, will run full test')
    # assumes self.test_history was initialized (e.g. to {}) before this
    # call — TODO confirm against the constructor.
    if 'items' not in self.test_history:
        self.test_history['items'] = {}
def generate_fat_jar(target, jars):
    """Generate a fat jar containing the contents of all the jar dependencies.

    Args:
        target: str, output path of the fat jar.
        jars: iterable of str, dependency jar paths merged in order; the
            first jar to provide a path wins on conflicts.
    """
    target_dir = os.path.dirname(target)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    target_fat_jar = zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED)
    # Record paths written in the fat jar to avoid duplicate writing
    path_jar_dict = {}
    conflicts = []
    for dep_jar in jars:
        jar = zipfile.ZipFile(dep_jar, 'r')
        name_list = jar.namelist()
        for name in name_list:
            # Directories (trailing '/') are always allowed; files go
            # through the exclusion filter.
            if name.endswith('/') or not _is_fat_jar_excluded(name):
                if name not in path_jar_dict:
                    target_fat_jar.writestr(name, jar.read(name))
                    path_jar_dict[name] = dep_jar
                else:
                    if name.endswith('/'):
                        continue
                    message = ('%s: Duplicate path %s found in {%s, %s}' % (
                        target, name,
                        os.path.basename(path_jar_dict[name]),
                        os.path.basename(dep_jar)))
                    # Always log all conflicts for diagnosis
                    console.debug(message)
                    if '/.m2/repository/' not in dep_jar:
                        # There are too many conflicts between maven jars,
                        # so we have to ignore them, only count source code conflicts
                        conflicts.append('\n'.join([
                            'Path: %s' % name,
                            'From: %s' % path_jar_dict[name],
                            'Ignored: %s' % dep_jar,
                        ]))
        jar.close()
    if conflicts:
        console.warning('%s: Found %d conflicts when packaging.' % (target, len(conflicts)))
        generate_fat_jar_metadata(target_fat_jar, jars, conflicts)
    # Write a minimal manifest describing how/when the jar was built.
    contents = [
        'Manifest-Version: 1.0',
        'Created-By: Python.Zipfile (Blade)',
        'Built-By: %s' % os.getenv('USER'),
        'Build-Time: %s' % time.asctime(),
    ]
    contents += _manifest_scm(target.split(os.sep)[0])
    contents.append('\n')
    target_fat_jar.writestr(_JAR_MANIFEST, '\n'.join(contents))
    target_fat_jar.close()
def java_jar(name, srcs=None, deps=None, prebuilt=False, pre_build=False, **kwargs):
    """Define java_jar target.

    Args:
        name: str, target name.
        srcs: Optional[List[str]], source files; defaults to an empty list.
        deps: Optional[List[str]], dependencies; defaults to an empty list.
        prebuilt: bool, whether the jar is prebuilt.
        pre_build: bool, deprecated alias of `prebuilt`.
    """
    # Fix: avoid mutable default arguments (shared across calls); use None
    # sentinels and create fresh lists per call. Callers that omitted the
    # arguments still get an empty list, so this is backward-compatible.
    srcs = [] if srcs is None else srcs
    deps = [] if deps is None else deps
    target = JavaJarTarget(name, srcs, deps, prebuilt or pre_build,
                           build_manager.instance, kwargs)
    if pre_build:
        console.warning('//%s:%s: "pre_build" has been deprecated, '
                        'please use "prebuilt"' % (target.path, target.name))
    build_manager.instance.register_target(target)
def _replace_config(self, section_name, section, user_config):
    """Replace config section items

    Normalizes list-valued options, warns about and drops unknown keys,
    then merges the remaining user config into the section.
    """
    unknown_keys = []
    for k in user_config:
        if k in section:
            # Normalize: list-valued options accept a single string too.
            if isinstance(section[k], list):
                user_config[k] = var_to_list(user_config[k])
        else:
            console.warning('%s: %s: unknown config item name: %s' % (
                self.current_file_name, section_name, k))
            unknown_keys.append(k)
    # Remove unknown keys so they don't pollute the section.
    for k in unknown_keys:
        del user_config[k]
    section.update(user_config)
def proto_library_config(append=None, **kwargs):
    """protoc config.

    Accepts the deprecated `protobuf_include_path` option and converts it
    to the new list-valued `protobuf_incs` with a warning.
    """
    path = kwargs.get('protobuf_include_path')
    if path:
        console.warning(('%s: proto_library_config: protobuf_include_path has '
                         'been renamed to protobuf_incs, and become a list') %
                        _blade_config.current_file_name)
        del kwargs['protobuf_include_path']
        # Legacy form may be a single space-separated string.
        # (`basestring` — this code targets Python 2.)
        if isinstance(path, basestring) and ' ' in path:
            kwargs['protobuf_incs'] = path.split()
        else:
            kwargs['protobuf_incs'] = [path]
    _blade_config.update_config('proto_library_config', append, kwargs)
def ninja_proto_go_rules(self, plugin_flags):
    """Generate ninja rules compiling .proto sources into go packages.

    The generated .pb.go files are placed under $go_home/src/<package>/ and
    registered as this target's 'gopkg' files.
    """
    go_home = config.get_item('go_config', 'go_home')
    protobuf_go_path = config.get_item('proto_library_config', 'protobuf_go_path')
    generated_goes = []
    for src in self.srcs:
        path = self._source_file_path(src)
        package = self._get_go_package_name(path)
        # go_package is expected to live under the configured go path.
        if not package.startswith(protobuf_go_path):
            console.warning('%s: go_package "%s" is not starting with "%s" in %s' % (
                self.fullname, package, protobuf_go_path, src))
        basename = os.path.basename(src)
        # basename[:-6] strips the '.proto' suffix.
        output = os.path.join(go_home, 'src', package, '%s.pb.go' % basename[:-6])
        self.ninja_build(output, 'protogo', inputs=path)
        generated_goes.append(output)
    self._add_target_file('gopkg', generated_goes)
def _append_config(self, section_name, section, append):
    """Append config section items

    Extends list-valued options; warns on non-list or unknown items.
    """
    if not isinstance(append, dict):
        console.error('%s: %s: Append must be a dict' % (
            self.current_file_name, section_name))
        # Fix: return after reporting the error instead of falling through
        # and iterating a non-dict value (consistent with the guarded
        # sibling variant of this method).
        return
    for k in append:
        if k in section:
            if isinstance(section[k], list):
                section[k] += var_to_list(append[k])
            else:
                console.warning('%s: %s: Config item %s is not a list' % (
                    self.current_file_name, section_name, k))
        else:
            console.warning('%s: %s: Unknown config item name: %s' % (
                self.current_file_name, section_name, k))
def _append_config(self, section_name, section, append):
    """Append config section items

    Extends list-valued options; warns on non-list or unknown items.
    """
    if not isinstance(append, dict):
        console.error('%s: %s: append must be a dict' % (
            self.current_file_name, section_name))
    else:
        for k in append:
            if k in section:
                # Only list-valued options can be appended to.
                if isinstance(section[k], list):
                    section[k] += var_to_list(append[k])
                else:
                    console.warning('%s: %s: config item %s is not a list' % (
                        self.current_file_name, section_name, k))
            else:
                console.warning('%s: %s: unknown config item name: %s' % (
                    self.current_file_name, section_name, k))
def _set_pack_exclusions(self, exclusions):
    """Validate and record maven-id exclusions for packaging.

    Only maven ids (group:artifact:version) are accepted; wildcard ids are
    validated against the allowed wildcard forms. Invalid entries are
    warned about and dropped.
    """
    exclusions = var_to_list(exclusions)
    self.data['exclusions'] = []
    for exclusion in exclusions:
        if maven.is_valid_id(exclusion):
            if '*' in exclusion:
                # Wildcards are only allowed in specific positions.
                if not self.__is_valid_maven_id_with_wildcards(exclusion):
                    console.warning('%s: Invalid maven id with wildcards %s. '
                                    'Ignored. The valid id could be: '
                                    'group:artifact:*, group:*:*, *:*:*' % (
                                        self.fullname, exclusion))
                    continue
            self.data['exclusions'].append(exclusion)
        else:
            console.warning('%s: Exclusions only support maven id '
                            'group:artifact:version. Ignore %s' % (
                                self.fullname, exclusion))
def _show_slow_builds(build_start_time, show_builds_slower_than):
    """Parse .ninja_log and report targets that built slower than a threshold.

    Args:
        build_start_time: int/float epoch seconds; only entries at or after
            this timestamp are considered (i.e. from the current build).
        show_builds_slower_than: float seconds, reporting threshold.
    """
    build_dir = build_manager.instance.get_build_path()
    with open(os.path.join(build_dir, '.ninja_log')) as f:
        # Only the v5 log format is understood.
        head = f.readline()
        if '# ninja log v5' not in head:
            console.warning('Unknown ninja log version: %s' % head)
            return
        build_times = []
        for line in f.readlines():
            # v5 format: start end timestamp target cmdhash.
            start_time, end_time, timestamp, target, cmdhash = line.split()
            cost_time = (int(end_time) - int(start_time)) / 1000.0  # ms -> s
            timestamp = int(timestamp)
            if timestamp >= build_start_time and cost_time > show_builds_slower_than:
                build_times.append((cost_time, target))
        if build_times:
            console.notice('Slow build targets:')
            for cost_time, target in sorted(build_times):
                console.notice('%.4gs\t%s' % (cost_time, target), prefix=False)
def _process_regular_resources(self, resources):
    """Expand resource specs into (source path, jar path) pairs.

    Missing files are still included so the error surfaces at build time;
    directories are walked recursively (skipping hidden subdirs).

    Returns:
        sorted list of (full_path, jar_path) tuples.
    """
    results = set()
    for resource in resources:
        full_path, jar_path = self._get_resource_path(resource)
        if not os.path.exists(full_path):
            console.warning('%s: Resource %s does not exist.' % (
                self.fullname, full_path))
            results.add((full_path, jar_path))  # delay error to build phase
        elif os.path.isfile(full_path):
            results.add((full_path, jar_path))
        else:
            # A directory: include every file under it, preserving the
            # relative layout inside the jar.
            for dir, subdirs, files in os.walk(full_path):
                # Skip over subdirs starting with '.', such as .svn
                subdirs[:] = [d for d in subdirs if not d.startswith('.')]
                for f in files:
                    f = os.path.join(dir, f)
                    rel_path = os.path.relpath(f, full_path)
                    results.add((f, os.path.join(jar_path, rel_path)))
    return sorted(results)
def _generate_location_resources(self, resources, resources_var):
    """Emit scons JavaResource rules for location-referenced resources.

    Each resource references another target's output file; missing
    references are warned about and skipped.
    """
    env_name = self._env_name()
    resources_dir = self._target_file_path() + '.resources'
    targets = self.blade.get_build_targets()
    for i, resource in enumerate(resources):
        key, type, dst = resource
        target = targets[key]
        target_var = target._get_target_var(type)
        if not target_var:
            console.warning('%s: Location %s %s is missing. Ignored.' % (
                self.fullname, key, type))
            continue
        if dst:
            dst_path = os.path.join(resources_dir, dst)
        else:
            # No explicit name: let scons substitute the source file name.
            dst_path = os.path.join(resources_dir, '${SOURCE.file}')
        res_var = self._var_name('location_resources__%s' % i)
        self._write_rule('%s = %s.JavaResource(target = "%s", source = %s)' % (
            res_var, env_name, dst_path, target_var))
        self._write_rule('%s.append(%s)' % (resources_var, res_var))
def _generate_location_reference_rules(self, location_vars, sources_dir):
    """Emit scons rules for location references of this target.

    References with an explicit destination get a PackageSource copy rule;
    others are used in place. Results are appended to location_vars.
    """
    env_name = self._env_name()
    targets = self.blade.get_build_targets()
    for i, location in enumerate(self.data['locations']):
        key, type, dst = location
        target = targets[key]
        target_var = target._get_target_var(type)
        if not target_var:
            console.warning('%s: Location %s %s is missing. Ignored.' % (
                self.fullname, key, type))
            continue
        if dst:
            dst = os.path.join(sources_dir, dst)
            var = self._var_name('location__%s' % i)
            self._write_rule('%s = %s.PackageSource(target = "%s", source = %s)' % (
                var, env_name, dst, target_var))
            location_vars.append(var)
        else:
            # No destination: reference the target's output directly.
            location_vars.append(target_var)
def _generate_test_data_rules(self):
    """Emit a scons ShellTestData rule for this target's location test data.

    Missing location references are warned about and skipped.
    """
    env_name = self._env_name()
    var_name = self._var_name('testdata')
    targets = self.blade.get_build_targets()
    sources = []
    for key, type, dst in self.data['locations']:
        target = targets[key]
        target_var = target._get_target_var(type)
        if not target_var:
            console.warning('%s: Location %s %s is missing. Ignored.' % (
                self.fullname, key, type))
        else:
            # Pair the referenced output with its destination name.
            sources.append('%s, %s.Value("%s")' % (target_var, env_name, dst))
    if sources:
        self._write_rule('%s = %s.ShellTestData(target = "%s.testdata", '
                         'source = [%s])' % (
                             var_name, env_name,
                             self._target_file_path(), ', '.join(sources)))
def ninja_rules(self):
    """Generate ninja rules for this shell test and its test data.

    Builds the test script bundle, then (if any location references
    resolve) a companion '<target>.testdata' output.
    """
    srcs = [self._source_file_path(s) for s in self.srcs]
    output = self._target_file_path()
    self.ninja_build(output, 'shelltest', inputs=srcs)
    targets = self.blade.get_build_targets()
    inputs, testdata = [], []
    for key, type, dst in self.data['locations']:
        path = targets[key]._get_target_file(type)
        if not path:
            console.warning('%s: Location %s %s is missing. Ignored.' % (
                self.fullname, key, type))
        else:
            inputs.append(path)
            # Default the data name to the referenced file's basename.
            if not dst:
                testdata.append(os.path.basename(path))
            else:
                testdata.append(dst)
    if inputs:
        output = '%s.testdata' % self._target_file_path()
        self.ninja_build(output, 'shelltestdata', inputs=inputs,
                         variables={'testdata' : ' '.join(testdata)})
def _prepare_env(self, target):
    """Prepare the test environment.

    Recreates the runfiles directory, symlinks the build profile and
    prebuilt libraries into it, prepares test data, and builds the
    environment dict (LD_LIBRARY_PATH, PATH) for running the test.

    Returns:
        dict, the environment for the test process.
    """
    runfiles_dir = self._runfiles_dir(target)
    # Start from a clean runfiles directory every run.
    shutil.rmtree(runfiles_dir, ignore_errors=True)
    os.mkdir(runfiles_dir)
    # Build profile symlink
    profile_link_name = os.path.basename(self.build_dir)
    os.symlink(os.path.abspath(self.build_dir),
               os.path.join(runfiles_dir, profile_link_name))
    # Prebuilt library symlink
    for prebuilt_file in self._get_prebuilt_files(target):
        src = os.path.abspath(prebuilt_file[0])
        dst = os.path.join(runfiles_dir, prebuilt_file[1])
        # lexists catches existing (possibly dangling) symlinks too.
        if os.path.lexists(dst):
            console.warning('trying to make duplicate prebuilt symlink:\n'
                            '%s -> %s\n'
                            '%s -> %s already exists\n'
                            'skipped, should check duplicate prebuilt '
                            'libraries' % (dst, src, dst, os.path.realpath(dst)))
            continue
        os.symlink(src, dst)
    self._prepare_test_data(target)
    run_env = dict(os.environ)
    environ_add_path(run_env, 'LD_LIBRARY_PATH', runfiles_dir)
    # Extra library search paths configured for cc binaries; '//' means
    # workspace-relative.
    run_lib_paths = config.get_item('cc_binary_config', 'run_lib_paths')
    if run_lib_paths:
        for path in run_lib_paths:
            if path.startswith('//'):
                path = path[2:]
            path = os.path.abspath(path)
            environ_add_path(run_env, 'LD_LIBRARY_PATH', path)
    # Make the configured JDK's tools available to java tests.
    java_home = config.get_item('java_config', 'java_home')
    if java_home:
        java_home = os.path.abspath(java_home)
        environ_add_path(run_env, 'PATH', os.path.join(java_home, 'bin'))
    return run_env
def _prepare_test_data(self, target):
    """Copy the target's declared testdata into its runfiles directory.

    Each testdata entry is either a path string or a (src, dest) tuple.
    Relative '..' sources are rejected; '//'-prefixed paths are
    workspace-relative, others are target-relative. Existing destinations
    are warned about and left untouched.
    """
    if 'testdata' not in target.data:
        return
    runfiles_dir = self._runfiles_dir(target)
    dest_list = []
    for i in target.data['testdata']:
        if isinstance(i, tuple):
            src, dest = i
        else:
            # A bare path serves as both source and destination.
            src = dest = i
        if '..' in src:
            console.warning('%s: Relative path is not allowed in testdata source. '
                            'Ignored %s.' % (target.fullname, src))
            continue
        if src.startswith('//'):
            src = src[2:]
        else:
            src = os.path.join(target.path, src)
        if dest.startswith('//'):
            dest = dest[2:]
        dest = os.path.normpath(dest)
        # Detect conflicting destinations among the entries seen so far.
        self.__check_test_data_dest(target, dest, dest_list)
        dest_list.append(dest)
        dest_path = os.path.join(runfiles_dir, dest)
        if os.path.exists(dest_path):
            console.warning('%s: %s already existed, could not prepare testdata.' % (
                target.fullname, dest))
            continue
        try:
            os.makedirs(os.path.dirname(dest_path))
        except OSError:
            # Parent directory may already exist; that's fine.
            pass
        if os.path.isfile(src):
            shutil.copy2(src, dest_path)
        elif os.path.isdir(src):
            shutil.copytree(src, dest_path)
    self._prepare_extra_test_data(target)
def _check_code_style(targets):
    """Run cpplint on source files changed relative to the workspace.

    Args:
        targets: the targets whose changed source files should be checked.

    Returns:
        int, 0 when the check was skipped or completed; 1 when interrupted.
    """
    cpplint = config.get_item('cc_config', 'cpplint')
    if not cpplint:
        console.info('cpplint disabled')
        return 0
    changed_files = _get_changed_files(targets, _BLADE_ROOT_DIR, _WORKING_DIR)
    if not changed_files:
        return 0
    console.info('Begin to check code style for changed source code')
    p = subprocess.Popen(('%s %s' % (cpplint, ' '.join(changed_files))), shell=True)
    try:
        p.wait()
        if p.returncode != 0:
            if p.returncode == 127:
                # 127 means the shell could not find the cpplint command.
                msg = ("Can't execute '{0}' to check style, you can config the "
                       "'cpplint' option to be a valid cpplint path in the "
                       "'cc_config' section of blade.conf or BLADE_ROOT, or "
                       "make sure '{0}' command is correct.").format(cpplint)
            else:
                # Fix: grammatical error in the original user-facing message
                # ("Please fixing ...").
                msg = 'Please fix style warnings before submitting the code!'
            console.warning(msg)
    # Fix: use the `as` form instead of the Python-2-only comma form so the
    # code parses on both Python 2.6+ and Python 3.
    except KeyboardInterrupt as e:
        console.error(str(e))
        return 1
def ninja_rules(self):
    """Generate ninja build rules for this package target.

    Collects (input, archive entry) pairs from both regular sources and
    location references, then emits either a plain 'package' build or a
    shell-based packaging step.
    """
    inputs, entries = [], []
    for src, dst in self.data['sources']:
        inputs.append(src)
        entries.append(dst)
    targets = self.blade.get_build_targets()
    # Location references point at output files of other targets.
    for key, type, dst in self.data['locations']:
        path = targets[key]._get_target_file(type)
        if not path:
            console.warning('%s: Location %s %s is missing. Ignored.' % (
                self.fullname, key, type))
            continue
        # Default the archive entry name to the referenced file's basename.
        if not dst:
            dst = os.path.basename(path)
        inputs.append(path)
        entries.append(dst)
    output = self._target_file_path(self.data['out'])
    if not self.data['shell']:
        self.ninja_build(output, 'package', inputs=inputs,
                         variables={'entries' : ' '.join(entries)})
    else:
        self.ninja_package_in_shell(output, inputs, entries)