def __init__(self, name, type, srcs, deps, resources, source_encoding, warnings, kwargs):
    """Init method.

    Init the scala target.

    Args:
        name: str, target name.
        type: str, concrete target type, forwarded to Target.
        srcs: str or list, source files; normalized to a list.
        deps: str or list, dependencies; normalized to a list.
        resources: str or list, resource files; normalized to a list.
        source_encoding: optional source file encoding, recorded only if set.
        warnings: optional warning options, recorded only if set.
        kwargs: extra keyword arguments forwarded to Target.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    resources = var_to_list(resources)
    Target.__init__(self, name, type, srcs, deps, None, build_manager.instance, kwargs)
    self._process_resources(resources)
    # Only record these optional attributes when explicitly provided.
    if source_encoding:
        self.data['source_encoding'] = source_encoding
    if warnings:
        self.data['warnings'] = warnings
def __init__(self, name, srcs, deps, main, base, kwargs):
    """Init method.

    Init the python binary target. The entry file is taken from `main`;
    when `main` is omitted it can only be inferred if there is exactly
    one source file.

    Args:
        name: str, target name.
        srcs: str or list, source files; normalized to a list.
        deps: str or list, dependencies; normalized to a list.
        main: optional str, the entry source file.
        base: python package base directory, forwarded to PythonLibrary.
        kwargs: extra keyword arguments forwarded to PythonLibrary.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    PythonLibrary.__init__(self, name, srcs, deps, base, None, kwargs)
    self.type = 'py_binary'
    self.data['run_in_shell'] = True
    if main:
        self.data['main'] = main
    elif len(srcs) == 1:
        # A single source file is unambiguously the entry point.
        self.data['main'] = srcs[0]
    else:
        # FIX: this branch is reached for zero srcs as well as for many;
        # the old message only mentioned "more than one srcs".
        console.error_exit(
            '%s: The entry file must be specified by the "main" '
            'argument if "srcs" does not contain exactly one file' %
            self.fullname)
def __init__(self, name, srcs, deps, type, out, shell, blade, kwargs):
    """Init the package target.

    Validates `type` against the supported package types and derives a
    default output name '<name>.<type>' when `out` is not given.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    # srcs are handled by _process_srcs below, so Target receives an
    # empty source list here.
    Target.__init__(self, name, 'package', [], deps, None, blade, kwargs)
    if type not in _package_types:
        console.error_exit('%s: Invalid type %s. Types supported '
                           'by the package are %s' % (
                           self.fullname, type, ', '.join(sorted(_package_types))))
    self.data['type'] = type
    self.data['sources'], self.data['locations'] = [], []
    self._process_srcs(srcs)
    if not out:
        out = '%s.%s' % (name, type)
    self.data['out'] = out
    self.data['shell'] = shell
def __init__(self, name, type, srcs, deps, base, visibility, kwargs):
    """Init method.

    Init the python target. `base`, when given, must be a '//'-rooted
    directory; it is stored with the '//' prefix stripped.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    Target.__init__(self, name, type, srcs, deps, visibility, build_manager.instance, kwargs)
    if base:
        if not base.startswith('//'):
            console.error_exit('%s: Invalid base directory %s. Option base should '
                               'be a directory starting with \'//\' from BLADE_ROOT directory.' %
                               (self.fullname, base))
        # Strip the leading '//' so the stored path is workspace-relative.
        self.data['python_base'] = base[2:]
    self.data['python_sources'] = [self._source_file_path(s) for s in srcs]
def __init__(self, name, srcs, deps, outs, cmd, blade, kwargs):
    """Init method.

    Init the gen rule target.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    outs = var_to_list(outs)
    Target.__init__(self, name, 'gen_rule', srcs, deps, None, blade, kwargs)
    self.data['outs'] = outs
    self.data['locations'] = []
    # Expand location references inside the command string; matched
    # references are collected via _process_location_reference.
    self.data['cmd'] = location_re.sub(self._process_location_reference, cmd)
def __init__(self, name, srcs, deps, resources, source_encoding, warnings,
             exported_deps, provided_deps, kwargs):
    """Init the scala library target.

    `exported_deps` and `provided_deps` are merged into the full dep list
    passed to ScalaTarget, and additionally recorded (in unified form)
    under their own keys.
    """
    exported_deps = var_to_list(exported_deps)
    provided_deps = var_to_list(provided_deps)
    all_deps = var_to_list(deps) + exported_deps + provided_deps
    ScalaTarget.__init__(self, name, 'scala_library', srcs, all_deps,
                         resources, source_encoding, warnings, kwargs)
    self.data['exported_deps'] = self._unify_deps(exported_deps)
    self.data['provided_deps'] = self._unify_deps(provided_deps)
def _check_proto_deps(self):
    """Only proto_library or gen_rule target is allowed as deps. """
    proto_config = config.get_section('proto_library_config')
    implicit_libs = (var_to_list(proto_config['protobuf_libs']) +
                     var_to_list(proto_config['protobuf_java_libs']))
    # Implicit protobuf runtime libs and protoc plugin deps are exempt
    # from the dep-type check below.
    exempt_keys = ([self._unify_dep(d) for d in implicit_libs] +
                   self.data['protoc_plugin_deps'])
    for dkey in self.deps:
        if dkey in exempt_keys:
            continue
        dep = self.target_database[dkey]
        if dep.type not in ('proto_library', 'gen_rule'):
            console.error_exit('%s: Invalid dep %s. Proto_library can '
                               'only depend on proto_library or gen_rule.' %
                               (self.fullname, dep.fullname))
def __init__(self, name, srcs, deps, resources, source_encoding, warnings, prebuilt,
             binary_jar, exported_deps, provided_deps, kwargs):
    """Init the java library target.

    A prebuilt library is backed by an existing jar (`binary_jar`,
    defaulting to '<name>.jar') instead of compiled sources.
    """
    type = 'java_library'
    if prebuilt:
        type = 'prebuilt_java_library'
    exported_deps = var_to_list(exported_deps)
    provided_deps = var_to_list(provided_deps)
    # exported/provided deps participate in the full dep list and are
    # also recorded separately in unified form.
    all_deps = var_to_list(deps) + exported_deps + provided_deps
    JavaTarget.__init__(self, name, type, srcs, all_deps, resources,
                        source_encoding, warnings, kwargs)
    self.data['exported_deps'] = self._unify_deps(exported_deps)
    self.data['provided_deps'] = self._unify_deps(provided_deps)
    if prebuilt:
        if not binary_jar:
            binary_jar = name + '.jar'
        self.data['binary_jar'] = self._source_file_path(binary_jar)
def __init__(self, name, srcs, deps, resources, source_encoding, warnings, main_class,
             exclusions, testdata, target_under_test, kwargs):
    """Init the java test target: a java binary with test data and an
    optional target under test."""
    JavaBinary.__init__(self, name, srcs, deps, resources, source_encoding,
                        warnings, main_class, exclusions, kwargs)
    self.type = 'java_test'
    self.data['testdata'] = var_to_list(testdata)
    if target_under_test:
        self.data['target_under_test'] = self._unify_dep(target_under_test)
def __init__(self, name, srcs, deps, testdata, kwargs):
    """Init the shell test target."""
    src_list = var_to_list(srcs)
    dep_list = var_to_list(deps)
    data_list = var_to_list(testdata)
    Target.__init__(self, name, 'sh_test', src_list, dep_list, None,
                    build_manager.instance, kwargs)
    self._process_test_data(data_list)
def __init__(self, name, srcs, deps, resources, source_encoding, warnings, testdata, kwargs):
    """Init the scala test target.

    Adds the configured scalatest libraries as implicit deps; warns if
    they are not configured.
    """
    ScalaFatLibrary.__init__(self, name, srcs, deps, resources, source_encoding,
                             warnings, [], kwargs)
    self.type = 'scala_test'
    self.data['testdata'] = var_to_list(testdata)
    scalatest_libs = config.get_item('scala_test_config', 'scalatest_libs')
    if scalatest_libs:
        self._add_hardcode_java_library(scalatest_libs)
    else:
        console.warning('scalatest jar was not configured')
def _replace_config(self, section_name, section, user_config):
    """Replace config section items"""
    unknown = []
    for key, value in user_config.items():
        if key not in section:
            console.warning('%s: %s: unknown config item name: %s' %
                            (self.current_file_name, section_name, key))
            unknown.append(key)
        elif isinstance(section[key], list):
            # List-valued items accept a single value for convenience.
            user_config[key] = var_to_list(value)
    # Drop unknown keys before merging into the section.
    for key in unknown:
        del user_config[key]
    section.update(user_config)
def glob(srcs, excludes=None):
    """A global function can be called in BUILD to specify a set of files using patterns.

    Args:
        srcs: glob pattern(s) of files to include.
        excludes: optional pattern(s) of files to remove from the result.
            Literal (wildcard-free) excludes match by exact path; patterns
            match via Path.match.

    Returns:
        A sorted list of matched file path strings, relative to the
        current source directory.
    """
    from blade import build_manager
    srcs = var_to_list(srcs)
    # FIX: `excludes` previously defaulted to a mutable [] — replaced with
    # None to avoid the shared-mutable-default-argument pitfall.
    excludes = var_to_list(excludes or [])
    source_dir = Path(build_manager.instance.get_current_source_path())

    def includes_iterator():
        # Collect files matching any include pattern, skipping hidden files.
        results = []
        for pattern in srcs:
            for path in source_dir.glob(pattern):
                if path.is_file() and not path.name.startswith('.'):
                    results.append(path.relative_to(source_dir))
        return results

    def is_special(pattern):
        # Whether the pattern contains glob wildcards.
        return '*' in pattern or '?' in pattern or '[' in pattern

    non_special_excludes = set()
    match_excludes = set()
    for pattern in excludes:
        if is_special(pattern):
            match_excludes.add(pattern)
        else:
            non_special_excludes.add(pattern)

    def exclusion(path):
        # Literal excludes compare exactly; wildcard excludes use Path.match.
        if str(path) in non_special_excludes:
            return True
        for pattern in match_excludes:
            ret = path.match(pattern)
            if ret:
                return True
        return False

    return sorted(set([str(p) for p in includes_iterator() if not exclusion(p)]))
def __init__(self, name, srcs, deps, optimize, deprecated, blade, kwargs):
    """Init the fbthrift library target."""
    srcs = var_to_list(srcs)
    self._check_thrift_srcs_name(srcs)
    CcTarget.__init__(self, name, 'fbthrift_library', srcs, deps, None, '',
                      [], [], [], optimize, [], [], blade, kwargs)
    fbthrift_config = config.get_section('fbthrift_config')
    fbthrift_libs = var_to_list(fbthrift_config['fbthrift_libs'])
    # Hardcode deps rule to thrift libraries.
    self._add_hardcode_library(fbthrift_libs)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    # For each thrift file initialize a FBThriftHelper, which will be used
    # to get the source files generated from thrift file.
    self.fbthrift_helpers = {}
    for src in srcs:
        self.fbthrift_helpers[src] = FBThriftHelper(
            os.path.join(self.path, src))
def _set_pack_exclusions(self, exclusions):
    """Record maven ids to exclude when packing, dropping invalid entries
    with a warning."""
    accepted = []
    for item in var_to_list(exclusions):
        if not maven.is_valid_id(item):
            console.warning('%s: Exclusions only support maven id '
                            'group:artifact:version. Ignore %s' % (
                            self.fullname, item))
            continue
        if '*' in item and not self.__is_valid_maven_id_with_wildcards(item):
            console.warning('%s: Invalid maven id with wildcards %s. '
                            'Ignored. The valid id could be: '
                            'group:artifact:*, group:*:*, *:*:*' %
                            (self.fullname, item))
            continue
        accepted.append(item)
    self.data['exclusions'] = accepted
def _append_config(self, section_name, section, append):
    """Append config section items"""
    if not isinstance(append, dict):
        console.error('%s: %s: append must be a dict' %
                      (self.current_file_name, section_name))
        return
    for key, value in append.items():
        if key not in section:
            console.warning('%s: %s: unknown config item name: %s' %
                            (self.current_file_name, section_name, key))
        elif isinstance(section[key], list):
            # Only list-valued items can be appended to.
            section[key] += var_to_list(value)
        else:
            console.warning('%s: %s: config item %s is not a list' %
                            (self.current_file_name, section_name, key))
def __init__(self, name, srcs, deps, optimize, deprecated, blade, kwargs):
    """Init the thrift library target."""
    srcs = var_to_list(srcs)
    self._check_thrift_srcs_name(srcs)
    CcTarget.__init__(self, name, 'thrift_library', srcs, deps, None, '',
                      [], [], [], optimize, [], [], blade, kwargs)
    self.data['python_vars'] = []
    self.data['python_sources'] = []
    thrift_libs = config.get_item('thrift_config', 'thrift_libs')
    # Hardcode deps rule to thrift libraries.
    self._add_hardcode_library(thrift_libs)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    self.data['deprecated'] = deprecated
    self.data['java_sources_explict_dependency'] = []
    # For each thrift file initialize a ThriftHelper, which will be used
    # to get the source files generated from thrift file.
    self.thrift_helpers = {}
    for src in srcs:
        self.thrift_helpers[src] = ThriftHelper(self.path, src)
def __init__(self, name, path, code_generation):
    """Init the protoc plugin descriptor.

    Args:
        name: str, the plugin name.
        path: path of the plugin.
        code_generation: dict mapping language -> {'deps': ...};
            languages must be within self.__languages.
    """
    self.name = name
    self.path = path
    assert isinstance(code_generation, dict)
    self.code_generation = {}
    for language, v in code_generation.iteritems():
        if language not in self.__languages:
            console.error_exit('%s: Language %s is invalid. '
                               'Protoc plugins in %s are supported by blade currently.' % (
                               name, language, ', '.join(self.__languages)))
        self.code_generation[language] = {}
        # Note that each plugin dep should be in the global target format
        # since protoc plugin is defined in the global scope
        deps = []
        for dep in var_to_list(v['deps']):
            if dep.startswith('//'):
                dep = dep[2:]
            # Dep keys are (path, name) tuples; keep order, drop duplicates.
            key = tuple(dep.split(':'))
            if key not in deps:
                deps.append(key)
        self.code_generation[language]['deps'] = deps
def __init__(self, name, srcs, deps, optimize, deprecated, generate_descriptors,
             plugins, source_encoding, blade, kwargs):
    """Init method.

    Init the proto target.
    """
    # pylint: disable=too-many-locals
    srcs = var_to_list(srcs)
    self._check_proto_srcs_name(srcs)
    CcTarget.__init__(self, name, 'proto_library', srcs, deps, None, '',
                      [], [], [], optimize, [], [], blade, kwargs)
    if srcs:
        self.data['public_protos'] = [
            self._source_file_path(s) for s in srcs
        ]
    proto_config = config.get_section('proto_library_config')
    protobuf_libs = var_to_list(proto_config['protobuf_libs'])
    protobuf_java_libs = var_to_list(proto_config['protobuf_java_libs'])
    protobuf_python_libs = var_to_list(
        proto_config['protobuf_python_libs'])
    # Hardcode deps rule to thirdparty protobuf lib.
    self._add_hardcode_library(protobuf_libs)
    self._add_hardcode_java_library(protobuf_java_libs)
    self._add_hardcode_library(protobuf_python_libs)
    plugins = var_to_list(plugins)
    self.data['protoc_plugins'] = plugins
    # Handle protoc plugin deps according to the language
    protoc_plugin_config = config.get_section('protoc_plugin_config')
    protoc_plugin_deps = set()
    protoc_plugin_java_deps = set()
    for plugin in plugins:
        if plugin not in protoc_plugin_config:
            console.error_exit('%s: Unknown plugin %s' % (self.fullname, plugin))
        p = protoc_plugin_config[plugin]
        for language, v in iteritems(p.code_generation):
            for key in v['deps']:
                # Plugin deps are merged into both the direct and the
                # expanded dep lists, avoiding duplicates.
                if key not in self.deps:
                    self.deps.append(key)
                if key not in self.expanded_deps:
                    self.expanded_deps.append(key)
                protoc_plugin_deps.add(key)
                if language == 'java':
                    protoc_plugin_java_deps.add(key)
    self.data['protoc_plugin_deps'] = list(protoc_plugin_deps)
    # Normally a proto target depends on another proto target when
    # it references a message defined in that target. Then in the
    # generated code there is public API with return type/arguments
    # defined outside and in java it needs to export that dependency,
    # which is also the case for java protobuf library.
    self.data['exported_deps'] = self._unify_deps(var_to_list(deps))
    self.data['exported_deps'] += self._unify_deps(protobuf_java_libs)
    self.data['exported_deps'] += list(protoc_plugin_java_deps)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    self.data['deprecated'] = deprecated
    self.data['source_encoding'] = source_encoding
    self.data['java_sources_explict_dependency'] = []
    self.data['python_vars'] = []
    self.data['python_sources'] = []
    self.data['generate_descriptors'] = generate_descriptors
def glob(include, exclude=None, excludes=None, allow_empty=False):
    """This function can be called in BUILD to specify a set of files using patterns.

    Args:
        include:List[str], file patterns to be matched.
        exclude:Optional[List[str]], file patterns to be removed from the result.
        allow_empty:bool: Whether a empty result is a error.

    Patterns may contain shell-like wildcards, such as * , ? , or [charset].
    Additionally, the path element '**' matches any subpath.
    """
    from blade import build_manager  # pylint: disable=import-outside-toplevel
    source_dir = Path(build_manager.instance.get_current_source_path())
    source_loc = _current_source_location()
    include = var_to_list(include)
    # Message severity is configurable; `output` is the matching console
    # function (e.g. console.warning or console.error).
    severity = config.get_item('global_config', 'glob_error_severity')
    output = getattr(console, severity)
    if excludes:
        output('%s %s: "excludes" is deprecated, use "exclude" instead' %
               (source_loc, severity), prefix=False)
    exclude = var_to_list(exclude) + var_to_list(excludes)

    def includes_iterator():
        # Collect files matching any include pattern, skipping hidden files.
        results = []
        for pattern in include:
            for path in source_dir.glob(pattern):
                if path.is_file() and not path.name.startswith('.'):
                    results.append(path.relative_to(source_dir))
        return results

    def is_special(pattern):
        # Whether the pattern contains glob wildcards.
        return '*' in pattern or '?' in pattern or '[' in pattern

    non_special_excludes = set()
    match_excludes = set()
    for pattern in exclude:
        if is_special(pattern):
            match_excludes.add(pattern)
        else:
            non_special_excludes.add(pattern)

    def exclusion(path):
        # Literal excludes compare exactly; wildcard excludes use Path.match.
        if str(path) in non_special_excludes:
            return True
        for pattern in match_excludes:
            ret = path.match(pattern)
            if ret:
                return True
        return False

    result = sorted({str(p) for p in includes_iterator() if not exclusion(p)})
    if not result and not allow_empty:
        args = repr(include)
        if exclude:
            args += ', exclude=%s' % repr(exclude)
        output(
            '%s %s: "glob(%s)" got an empty result. If it is the expected behavior, '
            'specify "allow_empty=True" to eliminate this message' %
            (source_loc, severity, args), prefix=False)
    return result
def __init__(self, name, srcs, deps, testdata, extra_goflags, kwargs):
    """Init the go test target."""
    GoTarget.__init__(self, name, 'go_test', srcs, deps, extra_goflags, kwargs)
    self.data['go_rule'] = 'gotest'
    test_data = var_to_list(testdata)
    self.data['testdata'] = test_data
def glob(include, exclude=None, excludes=None, allow_empty=False):
    """This function can be called in BUILD to specify a set of files using patterns.

    Args:
        include:List(str), file patterns to be matched.
        exclude:List[str], file patterns to be removed from the result.
        allow_empty:bool: Whether a empty result is a error.

    Patterns may contain shell-like wildcards, such as * , ? , or [charset].
    Additionally, the path element '**' matches any subpath.
    """
    from blade import build_manager
    source_dir = Path(build_manager.instance.get_current_source_path())
    include = var_to_list(include)
    if excludes:
        console.warning(
            '//%s: "glob.excludes" is deprecated, use "exclude" instead' % source_dir)
    exclude = var_to_list(exclude) + var_to_list(excludes)

    def matched_files():
        # Yield files matching any include pattern, skipping hidden files.
        for pattern in include:
            for path in source_dir.glob(pattern):
                if path.is_file() and not path.name.startswith('.'):
                    yield path.relative_to(source_dir)

    def has_wildcard(pattern):
        return any(c in pattern for c in '*?[')

    # Split excludes: literal paths compare exactly, wildcard patterns
    # use Path.match.
    literal_excludes = set()
    pattern_excludes = set()
    for pattern in exclude:
        if has_wildcard(pattern):
            pattern_excludes.add(pattern)
        else:
            literal_excludes.add(pattern)

    def is_excluded(path):
        if str(path) in literal_excludes:
            return True
        return any(path.match(pattern) for pattern in pattern_excludes)

    result = sorted({str(p) for p in matched_files() if not is_excluded(p)})
    if not result and not allow_empty:
        args = repr(include)
        if exclude:
            args += ', exclude=%s' % repr(exclude)
        console.error(
            '//%s: "glob(%s)" got an empty result. If it is the expected behavior, '
            'specify "allow_empty=True" to eliminate this error' % (source_dir, args))
    return result
def __init__(self, name, srcs, deps, visibility, optimize, deprecated,
             generate_descriptors, target_languages, plugins, source_encoding, kwargs):
    """Init method.

    Init the proto target.
    """
    # pylint: disable=too-many-locals
    srcs = var_to_list(srcs)
    super(ProtoLibrary, self).__init__(
        name=name,
        type='proto_library',
        srcs=srcs,
        deps=deps,
        visibility=visibility,
        warning='',
        defs=[],
        incs=[],
        export_incs=[],
        optimize=optimize,
        extra_cppflags=[],
        extra_linkflags=[],
        kwargs=kwargs)
    self._check_proto_srcs_name(srcs)
    if srcs:
        self.attr['public_protos'] = [self._source_file_path(s) for s in srcs]
    proto_config = config.get_section('proto_library_config')
    protobuf_libs = var_to_list(proto_config['protobuf_libs'])
    protobuf_java_libs = var_to_list(proto_config['protobuf_java_libs'])
    protobuf_python_libs = var_to_list(proto_config['protobuf_python_libs'])
    # Implicit deps rule to thirdparty protobuf lib.
    self._add_implicit_library(protobuf_libs)
    self._add_implicit_library(protobuf_java_libs)
    self._add_implicit_library(protobuf_python_libs)
    # Normally a proto target depends on another proto target when
    # it references a message defined in that target. Then in the
    # generated code there is public API with return type/arguments
    # defined outside and in java it needs to export that dependency,
    # which is also the case for java protobuf library.
    self.attr['exported_deps'] = self._unify_deps(var_to_list(deps))
    self.attr['exported_deps'] += self._unify_deps(protobuf_java_libs)
    self._handle_protoc_plugins(var_to_list(plugins))
    # Link all the symbols by default
    self.attr['link_all_symbols'] = True
    self.attr['deprecated'] = deprecated
    self.attr['source_encoding'] = source_encoding
    self.attr['generate_descriptors'] = generate_descriptors
    # TODO(chen3feng): Change the values to a `set` rather than separated attributes
    target_languages = set(var_to_list(target_languages))
    options = self.blade.get_options()
    # A language is generated when requested by the target or by the
    # corresponding global command line option.
    self.attr['generate_java'] = 'java' in target_languages or getattr(options, 'generate_java', False)
    self.attr['generate_python'] = 'python' in target_languages or getattr(options, 'generate_python', False)
    self.attr['generate_go'] = 'go' in target_languages or getattr(options, 'generate_go', False)
    # Declare generated header files
    full_cpp_headers = []
    cpp_headers = []
    for src in self.srcs:
        full_source, full_header = self._proto_gen_cpp_files(src)
        full_cpp_headers.append(full_header)
        source, header = self._proto_gen_cpp_file_names(src)
        cpp_headers.append(header)
    self.attr['generated_hdrs'] = full_cpp_headers
    self._set_hdrs(cpp_headers)
def __init__(self, name, srcs, deps, optimize, deprecated, generate_descriptors,
             plugins, source_encoding, blade, kwargs):
    """Init method.

    Init the proto target.
    """
    # pylint: disable=too-many-locals
    srcs = var_to_list(srcs)
    self._check_proto_srcs_name(srcs)
    CcTarget.__init__(self, name, 'proto_library', srcs, deps, None, '',
                      [], [], [], optimize, [], [], blade, kwargs)
    if srcs:
        self.data['public_protos'] = [self._source_file_path(s) for s in srcs]
    proto_config = config.get_section('proto_library_config')
    protobuf_libs = var_to_list(proto_config['protobuf_libs'])
    protobuf_java_libs = var_to_list(proto_config['protobuf_java_libs'])
    protobuf_python_libs = var_to_list(proto_config['protobuf_python_libs'])
    # Hardcode deps rule to thirdparty protobuf lib.
    self._add_hardcode_library(protobuf_libs)
    self._add_hardcode_java_library(protobuf_java_libs)
    self._add_hardcode_library(protobuf_python_libs)
    plugins = var_to_list(plugins)
    self.data['protoc_plugins'] = plugins
    # Handle protoc plugin deps according to the language
    protoc_plugin_config = config.get_section('protoc_plugin_config')
    protoc_plugin_deps = set()
    protoc_plugin_java_deps = set()
    for plugin in plugins:
        if plugin not in protoc_plugin_config:
            console.error_exit('%s: Unknown plugin %s' % (self.fullname, plugin))
        p = protoc_plugin_config[plugin]
        for language, v in p.code_generation.iteritems():
            for key in v['deps']:
                # Plugin deps are merged into both the direct and the
                # expanded dep lists, avoiding duplicates.
                if key not in self.deps:
                    self.deps.append(key)
                if key not in self.expanded_deps:
                    self.expanded_deps.append(key)
                protoc_plugin_deps.add(key)
                if language == 'java':
                    protoc_plugin_java_deps.add(key)
    self.data['protoc_plugin_deps'] = list(protoc_plugin_deps)
    # Normally a proto target depends on another proto target when
    # it references a message defined in that target. Then in the
    # generated code there is public API with return type/arguments
    # defined outside and in java it needs to export that dependency,
    # which is also the case for java protobuf library.
    self.data['exported_deps'] = self._unify_deps(var_to_list(deps))
    self.data['exported_deps'] += self._unify_deps(protobuf_java_libs)
    self.data['exported_deps'] += list(protoc_plugin_java_deps)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    self.data['deprecated'] = deprecated
    self.data['source_encoding'] = source_encoding
    self.data['java_sources_explict_dependency'] = []
    self.data['python_vars'] = []
    self.data['python_sources'] = []
    self.data['generate_descriptors'] = generate_descriptors