def __init__(self, argv):
    """Init the class.

    Parses argv into options and targets.  Everything after a literal
    '--' is kept aside as arguments for the program being run, then the
    remaining targets and options are validated for the sub command.
    """
    self.options, others = self._cmd_parse(argv)
    # If '--' in arguments, use all other arguments after it as run
    # arguments
    if '--' in others:
        sep = others.index('--')
        self.targets = others[:sep]
        self.options.args = others[sep + 1:]
    else:
        self.targets = others
        self.options.args = []
    for t in self.targets:
        if t.startswith('-'):
            console.error_exit('Unrecognized option %s, use blade [action] '
                               '--help to get all the options' % t)
    command = self.options.command
    # Dispatch table: each sub command validates its own options.
    checkers = {
        'build': self._check_build_command,
        'clean': self._check_clean_command,
        'dump': self._check_dump_command,
        'query': self._check_query_command,
        'run': self._check_run_command,
        'test': self._check_test_command,
    }
    checkers[command]()
def _normalize_target(target, working_dir):
    """Normalize target from command line into canonical form.

    Target canonical form: dir:name
        dir: relative to blade_root_dir, use '.' for blade_root_dir
        name: name  if target is dir:name
              '*'  if target is dir
              '...' if target is dir/...
    """
    if target.startswith('//'):
        # Already workspace-absolute; strip the marker.
        target = target[2:]
    elif target.startswith('/'):
        console.error_exit('Invalid target "%s" starting from root path.' % target)
    elif working_dir != '.':
        # A bare relative target is relative to the invocation directory.
        target = os.path.join(working_dir, target)
    if ':' in target:
        path, name = target.rsplit(':', 1)
    elif target.endswith('...'):
        path, name = target[:-3], '...'
    else:
        path, name = target, '*'
    return '%s:%s' % (os.path.normpath(path), name)
def __init__(self, name, type, srcs, deps, base, visibility, kwargs):
    """Init method.

    Common init for python targets: registers with the base Target and
    records the optional python base directory and the source paths.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    Target.__init__(self, name, type, srcs, deps, visibility,
                    build_manager.instance, kwargs)
    if base:
        if not base.startswith('//'):
            console.error_exit('%s: Invalid base directory %s. Option base should '
                               'be a directory starting with \'//\' from BLADE_ROOT directory.' %
                               (self.fullname, base))
        # Strip the '//' marker to get a workspace-relative directory.
        self.data['python_base'] = base[2:]
    self.data['python_sources'] = [self._source_file_path(src) for src in srcs]
def __init__(self, name, srcs, deps, type, out, shell, blade, kwargs):
    """Init method.

    Validates the package type, collects sources and location references
    from srcs, and records the output name (defaults to <name>.<type>).
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    Target.__init__(self, name, 'package', [], deps, None, blade, kwargs)
    if type not in _package_types:
        console.error_exit('%s: Invalid type %s. Types supported '
                           'by the package are %s' % (
                               self.fullname, type,
                               ', '.join(sorted(_package_types))))
    self.data['type'] = type
    self.data['sources'] = []
    self.data['locations'] = []
    self._process_srcs(srcs)
    self.data['out'] = out if out else '%s.%s' % (name, type)
    self.data['shell'] = shell
def _find_all_deps(target_id, targets, deps_map_cache, root_targets=None):
    """_find_all_deps.

    Return all targets depended by target_id directly and/or indirectly.
    We need the parameter root_target_id to check loopy dependency.
    """
    # Memoized: reuse the expanded dep list computed by an earlier call.
    new_deps_list = deps_map_cache.get(target_id)
    if new_deps_list is not None:
        return new_deps_list
    # root_targets holds the ids on the current DFS path; seeing a dep
    # that is already on the path means a dependency cycle.
    if root_targets is None:
        root_targets = set()
    root_targets.add(target_id)
    new_deps_list = []
    for d in targets[target_id].expanded_deps:
        # loop dependency
        if d in root_targets:
            err_msg = ''
            # NOTE(review): root_targets is a set, so the chain printed
            # here is in arbitrary order — TODO confirm that is intended.
            for t in root_targets:
                err_msg += '//%s:%s --> ' % (t[0], t[1])
            console.error_exit('loop dependency found: //%s:%s --> [%s]' % (
                d[0], d[1], err_msg))
        _check_dep_visibility(target_id, d, targets)
        new_deps_list.append(d)
        # Recurse with the shared path set and memo cache.
        new_deps_list += _find_all_deps(d, targets, deps_map_cache, root_targets)
    # De-duplicate the accumulated dep list via _unique_deps.
    new_deps_list = _unique_deps(new_deps_list)
    deps_map_cache[target_id] = new_deps_list
    # Pop this id off the DFS path before returning to the caller.
    root_targets.remove(target_id)
    return new_deps_list
def go_package(name, deps=None, testdata=None, extra_goflags=None):
    """Define a go package for the current source directory.

    Scans the directory for go sources: if any source declares
    'package main' a go_binary is generated, otherwise a go_library;
    test sources additionally generate a go_test.

    Fix: the defaults were mutable lists ([]), which are shared across
    all calls in Python; they are now None sentinels expanded to fresh
    lists, with unchanged behavior for every caller.
    """
    if deps is None:
        deps = []
    if testdata is None:
        testdata = []
    path = build_manager.instance.get_current_source_path()
    srcs, tests = find_go_srcs(path)
    if not srcs and not tests:
        console.error_exit('Empty go sources in %s' % path)
    if srcs:
        # A package is a binary iff some source file is in package main.
        main = False
        for src in srcs:
            package = extract_go_package(os.path.join(path, src))
            if package == 'main':
                main = True
                break
        if main:
            go_binary(name=name, srcs=srcs, deps=deps,
                      extra_goflags=extra_goflags)
        else:
            go_library(name=name, srcs=srcs, deps=deps,
                       extra_goflags=extra_goflags)
    if tests:
        go_test(name='%s_test' % name, srcs=tests, deps=deps,
                testdata=testdata, extra_goflags=extra_goflags)
def _proto_go_rules(self):
    """Generate go files.

    Emits scons rules that run the protobuf go generator for each .proto
    source and then install every generated file under $go_home/src,
    following the standard go source layout.
    """
    env_name = self._env_name()
    var_name = self._var_name('go')
    # The generated sources are copied under $go_home/src, so go_home
    # must be configured.
    go_home = config.get_item('go_config', 'go_home')
    if not go_home:
        console.error_exit('%s: go_home is not configured in BLADE_ROOT.' %
                           self.fullname)
    proto_go_path = config.get_item('proto_library_config', 'protobuf_go_path')
    self._write_rule('%s.Replace(PROTOBUFGOPATH="%s")' % (env_name, proto_go_path))
    self._write_rule('%s = []' % var_name)
    for src in self.srcs:
        proto_src = os.path.join(self.path, src)
        go_src = self._proto_gen_go_file(src)
        go_src_var = self._var_name_of(src, 'go_src')
        self._write_rule('%s = %s.ProtoGo("%s", "%s")' % (go_src_var, env_name,
                                                          go_src, proto_src))
        # Copy the generated go sources to $GOPATH
        # according to the standard go directory layout
        proto_dir = os.path.dirname(src)
        proto_name = os.path.basename(src)
        # The proto file name (dots replaced by underscores) becomes the
        # destination package directory.
        go_dst = os.path.join(go_home, 'src', proto_go_path, self.path,
                              proto_dir, proto_name.replace('.', '_'),
                              os.path.basename(go_src))
        go_dst_var = self._var_name_of(src, 'go_dst')
        self._write_rule('%s = %s.ProtoGoSource("%s", %s)' % (go_dst_var,
                                                              env_name, go_dst,
                                                              go_src_var))
        self._write_rule('%s.append(%s)' % (var_name, go_dst_var))
    self._add_target_var('go', var_name)
def __init__(self, log_dir):
    """Init method.

    Sets up the maven artifact cache: creates the download log
    directory, reads maven settings from java_config and resolves the
    snapshot update policy and the local repository location.
    """
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    self.__log_dir = log_dir
    # key: (id, classifier)
    # id: jar id in the format group:artifact:version
    # value: an instance of MavenArtifact
    self.__jar_database = {}
    java_config = config.get_section('java_config')
    self.__maven = java_config.get('maven')
    self.__central_repository = java_config.get('maven_central')
    # Verify the maven configuration before anything tries to use it.
    self._check_config()
    self.__snapshot_update_policy = java_config.get(
        'maven_snapshot_update_policy')
    if self.__snapshot_update_policy == 'interval':
        interval = java_config.get('maven_snapshot_update_interval')
        if not interval:
            console.error_exit(
                'java_config: "maven_snapshot_update_interval" is required when '
                '"maven_snapshot_update_policy" is "interval"')
        # NOTE(review): assumes the configured interval is a number of
        # minutes — confirm the config schema.
        self.__snapshot_update_interval = interval * 60  # minutes
    else:
        # Default: consider snapshots fresh for one day.
        self.__snapshot_update_interval = 86400
    # Local repository is set to the maven default directory
    # and could not be configured currently
    local_repository = '~/.m2/repository'
    self.__local_repository = os.path.expanduser(local_repository)
    # Download the snapshot artifact daily
    self.__build_time = time.time()
def _report_not_exist(source_dir, path, blade):
    """Report dir or BUILD file does not exist."""
    depender = _find_dir_depender(source_dir, blade)
    # Mention the depending target when one can be found.
    msg = '//%s not found' % path
    if depender:
        msg += ', required by %s' % depender
    console.error_exit(msg)
def md5sum_str(user_str):
    """Return the hex md5 digest of a text or bytes string.

    Fix: replaces the long-deprecated 'md5' module with hashlib
    (available since Python 2.5) and accepts both text and bytes, so the
    function also works on Python 3 where 'basestring' no longer exists.
    Text input is encoded as UTF-8 before hashing.
    """
    if isinstance(user_str, bytes):
        data = user_str
    elif isinstance(user_str, str):
        data = user_str.encode('utf-8')
    else:
        console.error_exit('Not a valid basestring type to calculate md5.')
    return hashlib.md5(data).hexdigest()
def __init__(self, name, srcs, deps, main, base, kwargs):
    """Init method.

    A py_binary is a py_library with an entry point: 'main' names the
    entry file explicitly; otherwise a single src is used as the entry.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    PythonLibrary.__init__(self, name, srcs, deps, base, None, kwargs)
    self.type = 'py_binary'
    self.data['run_in_shell'] = True
    if main:
        self.data['main'] = main
    elif len(srcs) == 1:
        self.data['main'] = srcs[0]
    else:
        console.error_exit(
            '%s: The entry file must be specified by the "main" '
            'argument if there are more than one srcs' % self.fullname)
def _check_config(self):
    """Check whether maven is configured correctly."""
    if self.__need_check_config:
        if not self.__maven:
            console.error_exit('MavenCache was not configured')
        # Only verify once per instance.
        self.__need_check_config = False
def _report_not_exist(source_dir, path, blade):
    """Report dir or BUILD file does not exist."""
    # Include the depending target in the message when known.
    depender = _find_dir_depender(source_dir, blade)
    if depender:
        console.error_exit('//%s not found, required by %s' % (path, depender))
        return
    console.error_exit('//%s not found' % path)
def __init__(self, name, type, srcs, deps, base, visibility, kwargs):
    """Init method.

    Shared init for python targets; stores the optional base directory
    (workspace-relative, without the '//' marker) and the source paths.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    Target.__init__(self, name, type, srcs, deps, visibility,
                    build_manager.instance, kwargs)
    if base:
        if not base.startswith('//'):
            console.error_exit('%s: Invalid base directory %s. Option base should '
                               'be a directory starting with \'//\' from BLADE_ROOT directory.' %
                               (self.fullname, base))
        self.data['python_base'] = base[2:]
    sources = [self._source_file_path(s) for s in srcs]
    self.data['python_sources'] = sources
def __init__(self, name, srcs, deps, main, base, kwargs):
    """Init method.

    Marks the library as a runnable binary and resolves its entry file:
    explicit 'main' wins, a single src is the implicit entry, anything
    else is an error.
    """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    PythonLibrary.__init__(self, name, srcs, deps, base, None, kwargs)
    self.type = 'py_binary'
    self.data['run_in_shell'] = True
    if not main and len(srcs) == 1:
        main = srcs[0]
    if main:
        self.data['main'] = main
    else:
        console.error_exit(
            '%s: The entry file must be specified by the "main" '
            'argument if there are more than one srcs' % self.fullname)
def generate_proto_rules(self):
    """Generate the backend rules that compile protobuf sources.

    Emits rules for C++, Java, Python, descriptor-set and (when a protoc
    go plugin is configured) golang code generation.

    Fix: the 'protogo' command string literal was broken across two
    lines by a bad paste, leaving a newline inside the format string;
    it is rejoined into a single command line here.  The multi-line
    flags block passed to _add_rule is reconstructed with one variable
    per line.
    """
    proto_config = config.get_section('proto_library_config')
    protoc = proto_config['protoc']
    protoc_java = protoc
    if proto_config['protoc_java']:
        protoc_java = proto_config['protoc_java']
    protobuf_incs = protoc_import_path_option(proto_config['protobuf_incs'])
    protobuf_java_incs = protobuf_incs
    if proto_config['protobuf_java_incs']:
        protobuf_java_incs = protoc_import_path_option(
            proto_config['protobuf_java_incs'])
    # Per-target flag variables, overridable by individual build rules.
    self._add_rule('''
protocflags =
protoccpppluginflags =
protocjavapluginflags =
protocpythonpluginflags =
''')
    self.generate_rule(
        name='proto',
        command='%s --proto_path=. %s -I=`dirname ${in}` '
                '--cpp_out=%s ${protocflags} ${protoccpppluginflags} ${in}' % (
                    protoc, protobuf_incs, self.build_dir),
        description='PROTOC ${in}')
    self.generate_rule(
        name='protojava',
        command='%s --proto_path=. %s --java_out=%s/`dirname ${in}` '
                '${protocjavapluginflags} ${in}' % (
                    protoc_java, protobuf_java_incs, self.build_dir),
        description='PROTOCJAVA ${in}')
    self.generate_rule(
        name='protopython',
        command='%s --proto_path=. %s -I=`dirname ${in}` '
                '--python_out=%s ${protocpythonpluginflags} ${in}' % (
                    protoc, protobuf_incs, self.build_dir),
        description='PROTOCPYTHON ${in}')
    self.generate_rule(
        name='protodescriptors',
        command='%s --proto_path=. %s -I=`dirname ${first}` '
                '--descriptor_set_out=${out} --include_imports '
                '--include_source_info ${in}' % (protoc, protobuf_incs),
        description='PROTODESCRIPTORS ${in}')
    protoc_go_plugin = proto_config['protoc_go_plugin']
    if protoc_go_plugin:
        go_home = config.get_item('go_config', 'go_home')
        if not go_home:
            console.error_exit(
                'go_home is not configured in either BLADE_ROOT or BLADE_ROOT.local.')
        # Generated go sources follow the standard $GOPATH/src layout.
        outdir = os.path.join(go_home, 'src')
        subplugins = proto_config['protoc_go_subplugins']
        if subplugins:
            go_out = 'plugins=%s:%s' % ('+'.join(subplugins), outdir)
        else:
            go_out = outdir
        self.generate_rule(
            name='protogo',
            command='%s --proto_path=. %s -I=`dirname ${in}` '
                    '--plugin=protoc-gen-go=%s --go_out=%s ${in}' % (
                        protoc, protobuf_incs, protoc_go_plugin, go_out),
            description='PROTOCGOLANG ${in}')
def scons_rules(self):
    """scons_rules.

    This method should be implemented in subclass.
    """
    # Abstract method: reaching here means a concrete target type forgot
    # to override it.
    console.error_exit('%s: should be subclassing' % self.type)
def protoc_plugin(**kwargs):
    """Register a protoc plugin described by kwargs into the config."""
    # Imported lazily to avoid a module-level import cycle with the
    # target definitions.
    from blade.proto_library_target import ProtocPlugin
    if 'name' not in kwargs:
        console.error_exit("Missing 'name' in protoc_plugin parameters: %s" % kwargs)
    plugins = _blade_config.get_section('protoc_plugin_config')
    plugins[kwargs['name']] = ProtocPlugin(**kwargs)
def cc_test_config(append=None, **kwargs):
    """cc_test_config section."""
    # heap_check, when given, must be one of the supported values.
    heap_check = kwargs.get('heap_check')
    valid = heap_check is None or heap_check in HEAP_CHECK_VALUES
    if not valid:
        console.error_exit('cc_test_config: heap_check can only be in %s' %
                           HEAP_CHECK_VALUES)
    _blade_config.update_config('cc_test_config', append, kwargs)
def cc_test_config(append=None, **kwargs):
    """cc_test_config section."""
    heap_check = kwargs.get('heap_check')
    # Reject unsupported heap_check values before updating the section.
    if heap_check is not None and heap_check not in HEAP_CHECK_VALUES:
        console.error_exit(
            'cc_test_config: heap_check can only be in %s' % HEAP_CHECK_VALUES)
    _blade_config.update_config('cc_test_config', append, kwargs)
def ninja_cc_source(self, source):
    """Map a lex/yacc source name to its generated C/C++ file name."""
    if source.endswith(('.l', '.y')):
        return source + '.c'
    if source.endswith(('.ll', '.yy')):
        return source + '.cc'
    console.error_exit('%s: Unknown source %s' % (self.fullname, source))
def protoc_plugin(**kwargs):
    """protoc_plugin.

    Registers a named ProtocPlugin in the protoc_plugin_config section.
    """
    from blade.proto_library_target import ProtocPlugin  # avoid import cycle
    name = kwargs.get('name')
    if name is None:
        console.error_exit("Missing 'name' in protoc_plugin parameters: %s" % kwargs)
    section = _blade_config.get_section('protoc_plugin_config')
    section[name] = ProtocPlugin(**kwargs)
def _proto_go_rules(self):
    """Generate go files.

    For each .proto source: run the protobuf go generator, then install
    the generated file under $go_home/src following the standard go
    source tree layout, and collect the installed files in a scons
    variable registered as the target's 'go' var.
    """
    env_name = self._env_name()
    var_name = self._var_name('go')
    # Generated sources are installed under $go_home/src, so go_home is
    # mandatory here.
    go_home = config.get_item('go_config', 'go_home')
    if not go_home:
        console.error_exit('%s: go_home is not configured in BLADE_ROOT.' %
                           self.fullname)
    proto_go_path = config.get_item('proto_library_config', 'protobuf_go_path')
    self._write_rule('%s.Replace(PROTOBUFGOPATH="%s")' % (env_name, proto_go_path))
    self._write_rule('%s = []' % var_name)
    for src in self.srcs:
        proto_src = os.path.join(self.path, src)
        go_src = self._proto_gen_go_file(src)
        go_src_var = self._var_name_of(src, 'go_src')
        self._write_rule('%s = %s.ProtoGo("%s", "%s")' % (
            go_src_var, env_name, go_src, proto_src))
        # Copy the generated go sources to $GOPATH
        # according to the standard go directory layout
        proto_dir = os.path.dirname(src)
        proto_name = os.path.basename(src)
        # The proto file name with '.' replaced by '_' becomes the
        # destination package directory.
        go_dst = os.path.join(go_home, 'src', proto_go_path, self.path,
                              proto_dir, proto_name.replace('.', '_'),
                              os.path.basename(go_src))
        go_dst_var = self._var_name_of(src, 'go_dst')
        self._write_rule('%s = %s.ProtoGoSource("%s", %s)' % (
            go_dst_var, env_name, go_dst, go_src_var))
        self._write_rule('%s.append(%s)' % (var_name, go_dst_var))
    self._add_target_var('go', var_name)
def query_dependency_tree(self, output_file):
    """Query the dependency tree of the specified targets."""
    # Tree output is only defined for the --deps direction.
    if self.__options.dependents:
        console.error_exit('Only query --deps can be output as tree format')
    print(file=output_file)
    for key in self.__expanded_command_targets:
        self._query_dependency_tree(key, 0, self.__build_targets, output_file)
        # Blank line between the trees of consecutive targets.
        print(file=output_file)
def _check_test_related_envs(kwargs):
    """Validate that every test_related_envs entry compiles as a regex."""
    for pattern in kwargs.get('test_related_envs', []):
        try:
            re.compile(pattern)
        except re.error as e:
            console.error_exit(
                '%s: "global_config.test_related_envs": Invalid env name or regex "%s", %s' %
                (_blade_config.current_file_name, pattern, e))
def _check_run_targets(self):
    """check that run command should have only one target."""
    targets = self.targets
    # The run command requires one fully-specified path:name target.
    if not targets or ':' not in targets[0]:
        console.error_exit('Please specify a single target to run: '
                           'blade run //target_path:target_name (or '
                           'a_path:target_name)')
    if len(targets) > 1:
        console.warning('run command will only take one target to build and run')
def _get_artifact_from_database(self, id, classifier):
    """get_artifact_from_database.

    Returns the cached MavenArtifact, downloading it on first access.
    """
    self._check_config()
    self._check_id(id)
    key = (id, classifier)
    if key not in self.__jar_database:
        # Not cached yet: fetch it from the remote repository.
        if not self._download_artifact(id, classifier):
            console.error_exit('Download %s failed' % id)
    return self.__jar_database[key]
def _compiler_target_arch(self):
    """Compiler(gcc) target architecture.

    Returns the part of the gcc target triple before the first '-'.
    """
    arch = BuildPlatform._get_cc_target_arch()
    head, sep, _rest = arch.partition('-')
    if not sep:
        console.error_exit('Unknown target architecture %s from gcc.' % arch)
    return head
def ninja_cc_source(self, source):
    """Return the generated file name for a lex/yacc source.

    .l/.y sources produce C files, .ll/.yy produce C++ files.
    """
    for suffixes, generated in ((('.l', '.y'), '.c'),
                                (('.ll', '.yy'), '.cc')):
        if source.endswith(suffixes):
            return source + generated
    console.error_exit('%s: Unknown source %s' % (self.fullname, source))
def query_dependency_tree(self, output_file):
    """Query the dependency tree of the specified targets."""
    if self.__options.dependents:
        # Only the --deps direction can be rendered as a tree.
        console.error_exit('only query --deps can be output as tree format')
    print(file=output_file)
    for key in self.__all_command_targets:
        self._query_dependency_tree(key, 0, self.__build_targets, output_file)
        # Separate each target's tree with a blank line.
        print(file=output_file)
def lock_workspace():
    """Acquire the exclusive build lock for the current source tree."""
    lock_file_fd, ret_code = lock_file('.Building.lock')
    if lock_file_fd == -1:
        # EAGAIN means another blade instance holds the lock.
        if ret_code == errno.EAGAIN:
            console.error_exit(
                'There is already an active building in current source tree.')
        console.error_exit('Lock exception, please try it later.')
    return lock_file_fd
def lock_workspace():
    """Take the workspace build lock, exiting on contention or failure."""
    fd, ret_code = lock_file('.Building.lock')
    if fd != -1:
        return fd
    if ret_code == errno.EAGAIN:
        # Another build is running in this tree.
        console.error_exit(
            'There is already an active building in current source tree.')
    else:
        console.error_exit('Lock exception, please try it later.')
    return fd
def lock_workspace(build_dir):
    """Take the per-build-dir workspace lock, exiting if unavailable."""
    _BUILDING_LOCK_FILE = '.blade.building.lock'
    lock_file_fd, ret_code = lock_file(os.path.join(build_dir, _BUILDING_LOCK_FILE))
    if lock_file_fd == -1:
        # EAGAIN: the lock is held by another active build.
        if ret_code == errno.EAGAIN:
            console.error_exit('There is already an active building in current workspace.')
        console.error_exit('Lock exception, please try it later.')
    return lock_file_fd
def register_target(self, target):
    """Register a target into blade target database.

    It is used to do quick looking.
    """
    key = target.key
    # A key collision means the same target name appears twice in one
    # BUILD file.
    if key in self.__target_database:
        console.error_exit('Target %s is duplicate in //%s/BUILD' % (
            target.name, target.path))
    self.__target_database[key] = target
def register_target(self, target):
    """Register a target into blade target database.

    It is used to do quick looking.
    """
    if target.key in self.__target_database:
        # Duplicate target name within the same BUILD file.
        console.error_exit('Target %s is duplicate in //%s/BUILD' % (
            target.name, target.path))
    self.__target_database[target.key] = target
def _dump_compdb(options, output_file_name):
    """Dump the ninja compilation database into output_file_name."""
    backend_builder = config.get_item('global_config', 'backend_builder')
    if backend_builder != 'ninja':
        console.error_exit('dump compdb only work when backend_builder is ninja')
    rules = build_manager.instance.get_all_rule_names()
    # 'ninja -t compdb <rules>' prints the database to stdout; redirect
    # it into the requested file.
    cmd = ['ninja', '-t', 'compdb'] + rules
    cmdstr = '%s > %s' % (subprocess.list2cmdline(cmd), output_file_name)
    return _run_backend_builder(cmdstr)
def _dump_compdb(options, output_file_name):
    """Write the ninja compilation database to output_file_name."""
    native_builder = config.get_item('global_config', 'native_builder')
    if native_builder != 'ninja':
        console.error_exit('dump compdb only work when native_builder is ninja')
    rules = build_manager.instance.get_all_rule_names()
    # Redirect 'ninja -t compdb' stdout into the output file.
    cmdstr = subprocess.list2cmdline(['ninja', '-t', 'compdb'] + rules)
    cmdstr += ' > %s' % output_file_name
    return _run_native_builder(cmdstr)
def try_parse_file(self, filename):
    """load the configuration file and parse.

    Executes the config file (plain Python, Python 2 execfile) against
    the config DSL globals.  current_file_name is exposed during the
    parse so config functions can reference the file in messages, and
    is always cleared afterwards.
    """
    try:
        self.current_file_name = filename
        if os.path.exists(filename):
            console.info('loading config file "%s"' % filename)
            execfile(filename, _config_globals, None)
    except SystemExit:
        # NOTE(review): presumably error_exit inside a config function
        # raises SystemExit; it is reported as a parse error here.
        console.error_exit('Parse error in config file %s' % filename)
    finally:
        self.current_file_name = ''
def _check_test_ignored_envs(kwargs):
    """Validate that every test_ignored_envs entry compiles as a regex."""
    for name in kwargs.get('test_ignored_envs') or []:
        try:
            re.compile(name)
        except re.error as e:
            console.error_exit(
                '%s: global_config.test_ignored_envs: Invalid env name or regex "%s", %s' %
                (_blade_config.current_file_name, name, e))
def _get_go_package_name(self, path):
    """Extract the go_package option value from the proto file at path."""
    with open(path) as f:
        content = f.read()
    # Match e.g.: option go_package = "some/pkg.path";
    m = re.search(r'^\s*option\s+go_package\s*=\s*"([\w./]+)";',
                  content, re.MULTILINE)
    if m:
        return m.group(1)
    console.error_exit('%s: "go_package" is mandatory to generate golang code '
                       'in protocol buffers but is missing in %s.' % (
                           self.fullname, path))
def try_parse_file(self, filename):
    """load the configuration file and parse.

    Runs the config file as Python code (via the exec_ helper) with the
    config DSL globals in scope.  current_file_name is exposed while
    parsing so error messages can name the file, and is cleared in the
    finally block regardless of outcome.
    """
    try:
        self.current_file_name = filename
        if os.path.exists(filename):
            console.info('loading config file "%s"' % filename)
            exec_(filename, _config_globals, None)
    except SystemExit:
        # NOTE(review): presumably error_exit inside the config raises
        # SystemExit; surface it as a parse error of this file.
        console.error_exit('Parse error in config file %s' % filename)
    finally:
        self.current_file_name = ''
def _get_go_package_name(self, path):
    """Return the value of the go_package option declared in a proto file."""
    pattern = r'^\s*option\s+go_package\s*=\s*"([\w./]+)";'
    with open(path) as proto_file:
        match = re.search(pattern, proto_file.read(), re.MULTILINE)
    if not match:
        console.error_exit(
            '%s: "go_package" is mandatory to generate golang code '
            'in protocol buffers but is missing in %s.' % (self.fullname, path))
    return match.group(1)
def _check_run_targets(self):
    """check that run command should have only one target."""
    # A run target must be a fully-specified path:name reference.
    if not self.targets or ':' not in self.targets[0]:
        console.error_exit('Please specify a single target to run: '
                           'blade run //target_path:target_name (or '
                           'a_path:target_name)')
    if len(self.targets) > 1:
        console.warning('run command will only take one target to build and run')
    if self.options.runargs:
        # Legacy flag: prepend its value to the post-'--' arguments.
        console.warning('--runargs has been deprecated, please put all run'
                        ' arguments after a "--"')
        self.options.args = shlex.split(self.options.runargs) + self.options.args
def _generate_cc_source(self, var_name, src):
    """Generate scons rules for cc source from lex/yacc source."""
    env_name = self._env_name()
    source = self._source_file_path(src)
    target = self._target_file_path(src)
    if src.endswith(('.l', '.y')):
        # lex/yacc generate C; the suffix comes from the scons env.
        args = 'target = "%s" + top_env["CFILESUFFIX"], source = "%s"' % (
            target, source)
        self._write_rule('%s = %s.CFile(%s)' % (var_name, env_name, args))
    elif src.endswith(('.ll', '.yy')):
        # flex++/bison++ generate C++.
        args = 'target = "%s" + top_env["CXXFILESUFFIX"], source = "%s"' % (
            target, source)
        self._write_rule('%s = %s.CXXFile(%s)' % (var_name, env_name, args))
    else:
        console.error_exit('%s: Unknown source %s' % (self.fullname, src))
def __init__(self, name, srcs, deps, base, visibility, kwargs):
    """Init method.

    A prebuilt py_library wraps exactly one .egg or .whl file and does
    not accept a base directory.
    """
    PythonTarget.__init__(self, name, 'prebuilt_py_library', srcs, deps, base,
                          visibility, kwargs)
    if base:
        self.error_exit("Prebuilt py_library doesn't support base")
    if len(self.srcs) != 1:
        self.error_exit('There can only be 1 file in prebuilt py_library')
    src = self.srcs[0]
    # Only prebuilt eggs and wheels are accepted.
    if not src.endswith(('.egg', '.whl')):
        console.error_exit(
            '%s: Invalid file %s in srcs, prebuilt py_library only support egg and whl' %
            (self.fullname, src))
def _check_proto_deps(self):
    """Only proto_library or gen_rule target is allowed as deps."""
    proto_config = config.get_section('proto_library_config')
    protobuf_libs = var_to_list(proto_config['protobuf_libs'])
    protobuf_java_libs = var_to_list(proto_config['protobuf_java_libs'])
    # Deps on the configured protobuf runtime libs and the protoc
    # plugins are implicitly allowed.
    allowed = set(self._unify_dep(d) for d in protobuf_libs + protobuf_java_libs)
    allowed.update(self.data['protoc_plugin_deps'])
    for dkey in self.deps:
        if dkey in allowed:
            continue
        dep = self.target_database[dkey]
        if dep.type not in ('proto_library', 'gen_rule'):
            console.error_exit('%s: Invalid dep %s. Proto_library can '
                               'only depend on proto_library or gen_rule.' %
                               (self.fullname, dep.fullname))
def _check_thrift_srcs_name(self, srcs):
    """Checks whether the thrift file's name ends with 'thrift'.

    Reports every invalid name via console.error and exits if any were
    found.

    Fix: a file name without any '.' used to fall through to the suffix
    check with pos == -1, so the same file was reported and counted
    twice; such names are now reported exactly once.
    """
    error = 0
    for src in srcs:
        base_name = os.path.basename(src)
        pos = base_name.rfind('.')
        if pos == -1:
            console.error('invalid thrift file name %s' % src)
            error += 1
            continue  # no extension at all — don't double-report below
        file_suffix = base_name[pos + 1:]
        if file_suffix != 'thrift':
            console.error('invalid thrift file name %s' % src)
            error += 1
    if error > 0:
        console.error_exit('invalid thrift file names found.')
def __check_test_data_dest(self, target, dest, dest_list):
    """Check whether the destination of test data is valid or not.

    A destination conflicts with an existing one when it is identical,
    normalizes to the same path, or nests inside/above it.

    Fix: when two destinations normalize to the same path but are
    spelled differently (e.g. 'a/b' vs 'a/b/'), the old code indexed
    long_path[len(short_path)] past the end of the string and raised
    IndexError; such pairs are now reported as ambiguous instead.
    """
    dest_norm = os.path.normpath(dest)
    if dest in dest_list:
        console.error_exit('Ambiguous testdata of %s: %s, exit...' % (
            target.fullname, dest))
    for item in dest_list:
        item_norm = os.path.normpath(item)
        if dest_norm == item_norm:
            # Same destination spelled differently ('a/b' vs 'a/b/').
            console.error_exit('Ambiguous testdata of %s: %s, exit...' % (
                target.fullname, dest))
        if len(dest_norm) >= len(item_norm):
            long_path, short_path = dest_norm, item_norm
        else:
            long_path, short_path = item_norm, dest_norm
        # One path nests inside the other, e.g. 'a/b' vs 'a/b/c'.
        if long_path.startswith(short_path + '/'):
            console.error_exit('%s could not exist with %s in testdata of %s' % (
                dest, item, target.fullname))
def _get_source_path(self, src, dst):
    """Return src full path within the workspace and mapping path in the archive."""
    # '..' could escape the workspace; reject it outright.
    if '..' in src or '..' in dst:
        console.error_exit('%s: Invalid src (%s, %s). Relative path is not allowed.' %
                           (self.fullname, src, dst))
    if src.startswith('//'):
        # Workspace-absolute: strip the marker, path is as written.
        src = src[2:]
        path = src
    else:
        path = self._source_file_path(src)
    # Default the archive destination to the source path itself.
    return path, dst or src
def _add_package_source(self, src, dst):
    """Add regular file or directory."""
    src, dst = self._get_source_path(src, dst)
    if not os.path.exists(src):
        console.error_exit('%s: Package source %s does not exist.' % (
            self.fullname, src))
    if os.path.isfile(src):
        self.data['sources'].append((src, dst))
        return
    # A directory: add every file under it, mirroring its layout.
    for dir, subdirs, files in os.walk(src):
        # Skip over subdirs starting with '.', such as .svn
        subdirs[:] = [d for d in subdirs if not d.startswith('.')]
        for name in files:
            full = os.path.join(dir, name)
            rel_path = os.path.relpath(full, src)
            self.data['sources'].append((full, os.path.join(dst, rel_path)))
def run_target(self, target_name):
    """Run one single target.

    Builds the command line for the target's executable, runs it with a
    prepared environment, and returns the process exit code.
    """
    target_key = tuple(target_name.split(':'))
    target = self.targets[target_key]
    if target.type not in self.run_list:
        console.error_exit('target %s:%s is not a target that could run' % (
            target_key[0], target_key[1]))
    run_env = self._prepare_env(target)
    cmd = [os.path.abspath(self._executable(target))] + self.options.args
    shell = target.data.get('run_in_shell', False)
    if shell:
        # Shell mode needs a single command string rather than a list.
        cmd = subprocess.list2cmdline(cmd)
    console.info("'%s' will be ran" % cmd)
    sys.stdout.flush()
    process = subprocess.Popen(cmd, env=run_env, close_fds=True, shell=shell)
    process.wait()
    self._clean_env()
    return process.returncode
def _process_srcs(self, srcs):
    """Process sources which could be regular files, directories or location references."""
    for entry in srcs:
        # Each entry is either 'src' or ('src', 'dst').
        if isinstance(entry, tuple):
            src, dst = entry
        elif isinstance(entry, str):
            src, dst = entry, ''
        else:
            console.error_exit('%s: Invalid src %s. src should '
                               'be either str or tuple.' % (self.fullname, entry))
        match = location_re.search(src)
        if match:
            self._add_location_reference(match, dst)
        else:
            self._add_package_source(src, dst)
def scons_rules(self):
    """scons_rules.

    It outputs the scons rules according to user options.
    """
    self._prepare_to_generate_rule()
    # Python wrappers are always generated; the result feeds the
    # optional java/php generators.
    dep_files_map = self._swig_library_rules_py()
    if (getattr(self.options, 'generate_java', False)
            or self.data.get('generate_java')):
        self._swig_library_rules_java(dep_files_map)
    if getattr(self.options, 'generate_php', False):
        if not self.php_inc_list:
            console.error_exit('failed to build //%s:%s, please install php modules' % (
                self.path, self.name))
        self._swig_library_rules_php(dep_files_map)
def ninja_command(self):
    """Expand the gen_rule cmd placeholders into a ninja command string."""
    cmd = self.data['cmd']
    # Translate blade placeholders into their ninja equivalents.
    for placeholder, value in (('$SRCS', '${in}'),
                               ('$OUTS', '${out}'),
                               ('$FIRST_SRC', '${_in_1}'),
                               ('$FIRST_OUT', '${_out_1}'),
                               ('$BUILD_DIR', self.build_path)):
        cmd = cmd.replace(placeholder, value)
    locations = self.data['locations']
    if locations:
        # %s holes in cmd are filled with the referenced target files.
        targets = self.blade.get_build_targets()
        paths = []
        for key, label in locations:
            path = targets[key]._get_target_file(label)
            if not path:
                console.error_exit('%s: Invalid location reference %s %s' %
                                   (self.fullname, ':'.join(key), label))
            paths.append(path)
        cmd = cmd % tuple(paths)
    return cmd
def _process_test_data(self, testdata):
    """Process test data of which the source could be regular file or location reference."""
    self.data['testdata'], self.data['locations'] = [], []
    for td in testdata:
        # Each entry is either 'src' or ('src', 'dst').
        if isinstance(td, tuple):
            src, dst = td
        elif isinstance(td, str):
            src, dst = td, ''
        else:
            console.error_exit('%s: Invalid testdata %s. Test data should '
                               'be either str or tuple.' % (self.fullname, td))
        m = location_re.search(src)
        if not m:
            self.data['testdata'].append(td)
            continue
        # A location reference: record the referenced target instead.
        key, type = self._add_location_reference_target(m)
        self.data['locations'].append((key, type, dst))
def _process_resources(self, resources):
    """Process resources which could be regular files/directories or location references."""
    self.data['resources'], self.data['location_resources'] = [], []
    for resource in resources:
        # Each entry is either 'src' or ('src', 'dst').
        if isinstance(resource, tuple):
            src, dst = resource
        elif isinstance(resource, str):
            src, dst = resource, ''
        else:
            console.error_exit('%s: Invalid resource %s. Resource should '
                               'be either str or tuple.' % (self.fullname, resource))
        m = location_re.search(src)
        if not m:
            self.data['resources'].append((src, dst))
            continue
        # A location reference: record the referenced target instead.
        key, type = self._add_location_reference_target(m)
        self.data['location_resources'].append((key, type, dst))
def _get_resource_path(self, resource):
    """Given a resource return its full path within the workspace and mapping path in the jar."""
    res_path, jar_path = resource[0], resource[1]
    # '..' could escape the workspace; reject it outright.
    if '..' in res_path:
        console.error_exit('%s: Invalid resource %s. Relative path is not allowed.' %
                           (self.fullname, res_path))
    if res_path.startswith('//'):
        # Workspace-absolute: strip the marker; it is its own full path.
        res_path = res_path[2:]
        full_path = res_path
        if not jar_path:
            jar_path = res_path
    else:
        full_path = self._source_file_path(res_path)
        if not jar_path:
            # Mapping rules from maven standard layout
            jar_path = self._java_resource_path(res_path)
    return full_path, jar_path
def _load_build_file(source_dir, processed_source_dirs, blade):
    """Load the BUILD and place the targets into database.

    Invoked by _load_targets.  Load and execute the BUILD
    file, which is a Python script, in source_dir.  Statements in BUILD
    depends on global variable current_source_dir, and will register build
    target/rules into global variables target_database.  Report error and
    exit if path/BUILD does NOT exist.
    The parameters processed_source_dirs refers to a set defined in the
    caller and used to avoid duplicated execution of BUILD files.
    """
    source_dir = os.path.normpath(source_dir)
    # TODO(yiwang): the character '#' is a magic value.
    if source_dir in processed_source_dirs or source_dir == '#':
        return
    processed_source_dirs.add(source_dir)
    if not os.path.exists(source_dir):
        _report_not_exist(source_dir, source_dir, blade)
    # Save and restore the current source path around the load so nested
    # loads see the right directory.
    old_current_source_path = blade.get_current_source_path()
    blade.set_current_source_path(source_dir)
    build_file = os.path.join(source_dir, 'BUILD')
    if os.path.exists(build_file) and not os.path.isdir(build_file):
        try:
            # The magic here is that a BUILD file is a Python script,
            # which can be loaded and executed by execfile().
            global __current_globles
            __current_globles = build_rules.get_all()
            execfile(build_file, __current_globles, None)
        except SystemExit:
            # NOTE(review): presumably raised by error_exit inside a
            # build rule; reported as a fatal error of this BUILD file.
            console.error_exit('%s: fatal error' % build_file)
        except:  # pylint: disable=bare-except
            # Any other exception is a parse/execution error in BUILD;
            # include the traceback for diagnosis.
            console.error_exit('Parse error in %s\n%s' % (
                build_file, traceback.format_exc()))
    else:
        _report_not_exist(source_dir, build_file, blade)
    blade.set_current_source_path(old_current_source_path)
def __init__(self, name, srcs, deps, warning, defs, incs, allow_undefined,
             recursive, prefix, blade, kwargs):
    """Init method.

    Init the cc lex yacc target: srcs must be exactly a
    (lex_source, yacc_source) pair.

    Fix: the srcs validation used to run before CcTarget.__init__ while
    referencing self.fullname, which is not set until the base class
    initializes — producing an AttributeError instead of the intended
    error message.  The validation now runs after base initialization.
    """
    CcTarget.__init__(self, name, 'lex_yacc_library', srcs, deps, None,
                      warning, defs, incs, [], [], [], [], blade, kwargs)
    if (len(srcs) != 2
            or not srcs[0].endswith(('.l', '.ll'))
            or not srcs[1].endswith(('.y', '.yy'))):
        console.error_exit('%s: srcs for lex_yacc_library should be '
                           'a pair of (lex_source, yacc_source)' % self.fullname)
    self.data['recursive'] = recursive
    self.data['prefix'] = prefix
    self.data['allow_undefined'] = allow_undefined
    # Lex/yacc outputs define symbols looked up at link time, so the
    # whole archive must be linked.
    self.data['link_all_symbols'] = True
def __init__(self, name, srcs, deps, base, visibility, kwargs):
    """Init method.

    A prebuilt py_library wraps exactly one .egg or .whl file; the base
    option is not supported for prebuilt libraries.
    """
    PythonTarget.__init__(self, name, 'prebuilt_py_library', srcs, deps, base,
                          visibility, kwargs)
    if base:
        console.error_exit("%s: Prebuilt py_library doesn't support base" %
                           self.fullname)
    if len(self.srcs) != 1:
        console.error_exit('%s: There can only be 1 file in prebuilt py_library' %
                           self.fullname)
    src = self.srcs[0]
    if not src.endswith(('.egg', '.whl')):
        console.error_exit(
            '%s: Invalid file %s in srcs, prebuilt py_library only support egg and whl' %
            (self.fullname, src))