def __init__(self, argv):
    """Parse argv, split off pass-through run arguments, validate the
    targets and dispatch to the per-subcommand option checker."""
    self.options, others = self._cmd_parse(argv)

    # Everything after a literal '--' is forwarded verbatim to the
    # program being run; everything before it is a build target.
    try:
        cut = others.index('--')
    except ValueError:
        self.targets, self.options.args = others, []
    else:
        self.targets, self.options.args = others[:cut], others[cut + 1:]

    for t in self.targets:
        if t.startswith('-'):
            console.fatal('Unrecognized option %s, use blade [action] '
                          '--help to get all the options' % t)

    # Check the options with different sub command
    checkers = {
        'build': self._check_build_command,
        'clean': self._check_clean_command,
        'dump': self._check_dump_command,
        'query': self._check_query_command,
        'run': self._check_run_command,
        'test': self._check_test_command,
    }
    checkers[self.options.command]()
def _expand_target_deps(target_id, targets, root_targets=None):
    """Return all targets depended on by target_id directly and/or indirectly.

    root_targets holds the ids on the current DFS path and is used to
    detect loop (cyclic) dependencies; results are memoized on each
    target's expanded_deps attribute.
    """
    target = targets[target_id]
    if target.expanded_deps is not None:
        # Already expanded by an earlier traversal.
        return target.expanded_deps

    if root_targets is None:
        root_targets = set()
    root_targets.add(target_id)

    expanded = []
    for dep in target.deps:
        if dep in root_targets:
            # dep is already on the DFS path: a dependency cycle.
            path_msg = ''.join('//%s --> ' % t for t in root_targets)
            console.fatal('Loop dependency found: //%s --> [%s]' % (dep, path_msg))
        expanded.append(dep)
        expanded += _expand_target_deps(dep, targets, root_targets)

    target.check_visibility()
    expanded = _unique_deps(expanded)
    target.expanded_deps = expanded
    root_targets.remove(target_id)
    return expanded
def _compiler_target_arch(self):
    """Return the compiler (gcc) target architecture prefix.

    The toolchain reports something like 'x86_64-linux-gnu'; only the
    part before the first dash is returned.
    """
    arch = ToolChain.get_cc_target_arch()
    dash = arch.find('-')
    if dash < 0:
        console.fatal('Unknown target architecture %s from gcc.' % arch)
    return arch[:dash]
def _check_run_targets(self):
    """Validate that `blade run` got exactly one concrete target."""
    invalid = (len(self.targets) != 1
               or ':' not in self.targets[0]
               or self.targets[0].endswith('...'))
    if invalid:
        console.fatal('Please specify a single target to run: '
                      'blade run //target_path:target_name (or '
                      'a_path:target_name)')
def _normalize_one(target, working_dir):
    """Normalize target from command line form into canonical form.

    Canonical form is 'dir:name', where dir is relative to
    blade_root_dir ('.' stands for the root itself) and name is:
      the literal name  for 'dir:name',
      '*'               for a bare directory,
      '...'             for the recursive form 'dir/...'.
    """
    if target.startswith('//'):
        target = target[2:]
    elif target.startswith('/'):
        console.fatal('Invalid target "%s" starting from root path.' % target)
    elif working_dir != '.':
        # Plain relative targets are interpreted relative to working_dir.
        target = os.path.join(working_dir, target)

    if ':' in target:
        path, name = target.rsplit(':', 1)
    elif target.endswith('...'):
        path, name = target[:-3], '...'
    else:
        path, name = target, '*'
    return '%s:%s' % (os.path.normpath(path), name)
def _find_all_deps(target_id, targets, deps_map_cache, root_targets=None):
    """Return all targets depended on by target_id directly and/or indirectly.

    root_targets holds the (path, name) ids on the current DFS path and is
    used to detect loopy (cyclic) dependencies; deps_map_cache memoizes
    results across calls.
    """
    # Memoized result from a previous traversal, if any.
    new_deps_list = deps_map_cache.get(target_id)
    if new_deps_list is not None:
        return new_deps_list
    if root_targets is None:
        root_targets = set()
    root_targets.add(target_id)
    new_deps_list = []
    for d in targets[target_id].expanded_deps:
        # d already on the DFS path => loop dependency: report and abort.
        if d in root_targets:
            err_msg = ''
            for t in root_targets:
                err_msg += '//%s:%s --> ' % (t[0], t[1])
            console.fatal('Loop dependency found: //%s:%s --> [%s]' % (
                d[0], d[1], err_msg))
        _check_dep_visibility(target_id, d, targets)
        new_deps_list.append(d)
        new_deps_list += _find_all_deps(d, targets, deps_map_cache, root_targets)
    new_deps_list = _unique_deps(new_deps_list)
    deps_map_cache[target_id] = new_deps_list
    root_targets.remove(target_id)
    return new_deps_list
def go_package(name, deps=None, testdata=None, visibility=None, extra_goflags=None):
    """Define a go package target from the sources in the current directory.

    Generates a go_binary when any source declares 'package main',
    otherwise a go_library; an additional go_test target is generated
    when test sources exist.

    Fix: the defaults for deps/testdata were mutable lists ([]), which
    are shared across calls in Python; replaced with None sentinels.
    """
    if deps is None:
        deps = []
    if testdata is None:
        testdata = []
    path = build_manager.instance.get_current_source_path()
    srcs, tests = find_go_srcs(path)
    if not srcs and not tests:
        console.fatal('Empty go sources in %s' % path)
    if srcs:
        # A package is a program iff any source declares 'package main'.
        main = False
        for src in srcs:
            package = extract_go_package(os.path.join(path, src))
            if package == 'main':
                main = True
                break
        if main:
            go_binary(name=name, srcs=srcs, deps=deps, visibility=visibility,
                      extra_goflags=extra_goflags)
        else:
            go_library(name=name, srcs=srcs, deps=deps, visibility=visibility,
                       extra_goflags=extra_goflags)
    if tests:
        go_test(name='%s_test' % name, srcs=tests, deps=deps,
                visibility=visibility, testdata=testdata,
                extra_goflags=extra_goflags)
def parse(self, argv):
    """Parse the command line into (command, options, targets).

    Arguments after a literal '--' are collected into options.args and
    handed to the program under `blade run`.
    """
    options, others = self._arg_parser.parse_known_args(argv)

    try:
        cut = others.index('--')
    except ValueError:
        targets, options.args = others, []
    else:
        targets, options.args = others[:cut], others[cut + 1:]

    for t in targets:
        if t.startswith('-'):
            console.fatal('Unrecognized option %s, use blade [action] '
                          '--help to get all the options' % t)

    command = options.command
    # Validate the options against the chosen subcommand.
    self._check_subcommand(command, options, targets)
    return command, options, targets
def __init__(self, name, srcs, deps, visibility, base, kwargs):
    """Init method.

    Validates that a prebuilt py_library has no base dir and exactly one
    source, which must be an .egg or .whl file.
    """
    super(PrebuiltPythonLibrary, self).__init__(
        name=name,
        type='prebuilt_py_library',
        srcs=srcs,
        deps=deps,
        visibility=visibility,
        base=base,
        kwargs=kwargs)
    if base:
        self.fatal("Prebuilt py_library doesn't support base")
    if len(self.srcs) != 1:
        self.fatal('There can only be 1 file in prebuilt py_library')
    src = self.srcs[0]
    # Fix: use the tuple form of endswith instead of two chained checks.
    # NOTE(review): this error goes through console.fatal while the two
    # above use self.fatal — presumably self.fatal adds the target name;
    # unifying them would change the message text, so left as-is.
    if not src.endswith(('.egg', '.whl')):
        console.fatal(
            '%s: Invalid file "%s" in srcs, prebuilt py_library only support egg and whl' % (
                self.fullname, src))
def __init__(self, log_dir):
    """Init method.

    Sets up the maven artifact cache: creates the log directory, reads
    the java_config section, and computes the snapshot update interval.
    """
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    self.__log_dir = log_dir
    # key: (id, classifier)
    # id: jar id in the format group:artifact:version
    # value: an instance of MavenArtifact
    self.__jar_database = {}
    java_config = config.get_section('java_config')
    self.__maven = java_config.get('maven')
    self.__central_repository = java_config.get('maven_central')
    self._check_config()
    self.__snapshot_update_policy = java_config.get(
        'maven_snapshot_update_policy')
    if self.__snapshot_update_policy == 'interval':
        # The 'interval' policy requires an explicit interval in minutes.
        interval = java_config.get('maven_snapshot_update_interval')
        if not interval:
            console.fatal(
                'java_config: "maven_snapshot_update_interval" is required when '
                '"maven_snapshot_update_policy" is "interval"')
        self.__snapshot_update_interval = interval * 60  # minutes
    else:
        # Default: refresh snapshot artifacts daily (in seconds).
        self.__snapshot_update_interval = 86400
    # Local repository is set to the maven default directory
    # and could not be configured currently
    local_repository = '~/.m2/repository'
    self.__local_repository = os.path.expanduser(local_repository)
    # Download the snapshot artifact daily
    self.__build_time = time.time()
def __load_build_file(source_dir, blade):
    """Load and execute the BUILD file, which is a Python script, in source_dir.

    Statements in BUILD depend on the global variable current_source_dir,
    and will register build targets/rules into the global target_database.
    Reports an error and exits if path/BUILD does NOT exist.

    Returns True when the BUILD file was executed successfully, False
    otherwise.
    """
    if not os.path.isdir(source_dir):
        _report_not_exist('Directory', source_dir, source_dir, blade)
        return False
    # Remember and restore the current source path around execution so
    # nested loads see a consistent value.
    old_current_source_path = blade.get_current_source_path()
    try:
        blade.set_current_source_path(source_dir)
        build_file = os.path.join(source_dir, 'BUILD')
        if os.path.isfile(build_file):
            try:
                # The magic here is that a BUILD file is a Python script,
                # which can be loaded and executed by execfile().
                global __current_globals
                __current_globals = build_rules.get_all()
                exec_file(build_file, __current_globals, None)
                return True
            except SystemExit:
                console.fatal('%s: Fatal error' % build_file)
            except:  # pylint: disable=bare-except
                console.fatal('Parse error in %s\n%s' %
                              (build_file, traceback.format_exc()))
        else:
            _report_not_exist('File', build_file, source_dir, blade)
    finally:
        blade.set_current_source_path(old_current_source_path)
    return False
def query_dependency_tree(self, output_file):
    """Print the dependency tree of each expanded command target.

    Only supported for --deps queries; fatal for --dependents.
    """
    if self.__options.dependents:
        console.fatal('Only query --deps can be output as tree format')
    print(file=output_file)
    for target_key in self.__expanded_command_targets:
        self._query_dependency_tree(target_key, 0, self.__build_targets,
                                    output_file)
        print(file=output_file)
def generate_proto_rules(self):
    """Emit backend rules for compiling .proto files.

    Generates rules for C++, Java and Python code generation, descriptor
    set output, and (when a protoc go plugin is configured) Go code
    generation.
    """
    proto_config = config.get_section('proto_library_config')
    protoc = proto_config['protoc']
    # Java may use a dedicated protoc binary; fall back to the common one.
    protoc_java = protoc
    if proto_config['protoc_java']:
        protoc_java = proto_config['protoc_java']
    protobuf_incs = protoc_import_path_option(proto_config['protobuf_incs'])
    # Same for the Java include paths.
    protobuf_java_incs = protobuf_incs
    if proto_config['protobuf_java_incs']:
        protobuf_java_incs = protoc_import_path_option(proto_config['protobuf_java_incs'])
    # Per-target flag variables, overridable by each build statement.
    self._add_rule(textwrap.dedent('''\
            protocflags =
            protoccpppluginflags =
            protocjavapluginflags =
            protocpythonpluginflags =
            '''))
    self.generate_rule(name='proto',
                       command='%s --proto_path=. %s -I=`dirname ${in}` '
                               '--cpp_out=%s ${protocflags} ${protoccpppluginflags} ${in}' % (
                                   protoc, protobuf_incs, self.build_dir),
                       description='PROTOC ${in}')
    self.generate_rule(name='protojava',
                       command='%s --proto_path=. %s --java_out=%s/`dirname ${in}` '
                               '${protocjavapluginflags} ${in}' % (
                                   protoc_java, protobuf_java_incs, self.build_dir),
                       description='PROTOCJAVA ${in}')
    self.generate_rule(name='protopython',
                       command='%s --proto_path=. %s -I=`dirname ${in}` '
                               '--python_out=%s ${protocpythonpluginflags} ${in}' % (
                                   protoc, protobuf_incs, self.build_dir),
                       description='PROTOCPYTHON ${in}')
    self.generate_rule(name='protodescriptors',
                       command='%s --proto_path=. %s -I=`dirname ${first}` '
                               '--descriptor_set_out=${out} --include_imports '
                               '--include_source_info ${in}' % (
                                   protoc, protobuf_incs),
                       description='PROTODESCRIPTORS ${in}')
    protoc_go_plugin = proto_config['protoc_go_plugin']
    if protoc_go_plugin:
        go_home = config.get_item('go_config', 'go_home')
        go_module_enabled = config.get_item('go_config', 'go_module_enabled')
        go_module_relpath = config.get_item('go_config', 'go_module_relpath')
        if not go_home:
            console.fatal('"go_config.go_home" is not configured')
        # Output location depends on whether go modules are in use.
        if go_module_enabled and not go_module_relpath:
            outdir = proto_config['protobuf_go_path']
        else:
            outdir = os.path.join(go_home, 'src')
        subplugins = proto_config['protoc_go_subplugins']
        if subplugins:
            go_out = 'plugins=%s:%s' % ('+'.join(subplugins), outdir)
        else:
            go_out = outdir
        self.generate_rule(name='protogo',
                           command='%s --proto_path=. %s -I=`dirname ${in}` '
                                   '--plugin=protoc-gen-go=%s --go_out=%s ${in}' % (
                                       protoc, protobuf_incs, protoc_go_plugin, go_out),
                           description='PROTOCGOLANG ${in}')
def lock_workspace(build_dir):
    """Take the workspace building lock and return the lock file fd.

    Exits with a fatal error when another build already holds the lock
    or when locking fails for any other reason.
    """
    _BUILDING_LOCK_FILE = '.blade.building.lock'
    fd, err = lock_file(os.path.join(build_dir, _BUILDING_LOCK_FILE))
    if fd == -1:
        if err == errno.EAGAIN:
            # Another process holds the lock.
            console.fatal('There is already an active building in current workspace.')
        else:
            console.fatal('Lock exception, please try it later.')
    return fd
def _check_run_targets(self):
    """Validate targets for `blade run`: require one runnable target,
    warn when more than one was given."""
    if not self.targets or ':' not in self.targets[0]:
        console.fatal('Please specify a single target to run: '
                      'blade run //target_path:target_name (or '
                      'a_path:target_name)')
    if len(self.targets) > 1:
        # Extra targets are silently ignored by run; tell the user.
        console.warning(
            'Run command will only take one target to build and run')
def register_target(self, target):
    """Register *target* into the blade target database for quick lookup.

    Exits with a fatal error when another target with the same key was
    already registered (duplicate name inside one BUILD file).
    """
    database = self.__target_database
    if target.key in database:
        console.fatal('Target %s is duplicate in //%s/BUILD' % (target.name, target.path))
    database[target.key] = target
def try_parse_file(self, filename):
    """Load and execute the configuration file, if it exists.

    current_file_name is set for the duration of the parse so errors can
    be attributed, and always reset afterwards.
    """
    self.current_file_name = filename
    try:
        if os.path.exists(filename):
            console.info('Loading config file "%s"' % filename)
            exec_(filename, _config_globals, None)
    except SystemExit:
        console.fatal('Parse error in config file %s' % filename)
    finally:
        self.current_file_name = ''
def _dump_compdb(options, output_file_name):
    """Dump a clang compilation database using ninja's compdb tool.

    Only works with the ninja backend; the output is redirected into
    output_file_name via the shell.
    """
    if config.get_item('global_config', 'backend_builder') != 'ninja':
        console.fatal('Dump compdb only work when backend_builder is ninja')
    cmd = ['ninja', '-f', build_manager.instance.build_script(), '-t', 'compdb']
    cmd += build_manager.instance.get_all_rule_names()
    cmdstr = '%s > %s' % (subprocess.list2cmdline(cmd), output_file_name)
    return _run_backend_builder(cmdstr)
def __check_test_data_dest(self, target, dest, dest_list):
    """Check whether the destination of test data is valid or not.

    A destination is invalid when it duplicates an earlier one or when it
    nests inside (or contains) an earlier destination.

    Args:
        target: the target owning the testdata entry.
        dest: the destination path being added.
        dest_list: destinations already registered for this target.
    """
    dest_norm = os.path.normpath(dest)
    if dest in dest_list:
        console.fatal('Ambiguous testdata of %s: %s, exit...' % (target.fullname, dest))
    for item in dest_list:
        item_norm = os.path.normpath(item)
        # Fix: two textually different paths can normalize to the same
        # string (e.g. 'a/b' vs 'a//b'); the old code then indexed one
        # character past the end of long_path below and raised IndexError.
        if item_norm == dest_norm:
            console.fatal('Ambiguous testdata of %s: %s, exit...' % (target.fullname, dest))
        if len(dest_norm) >= len(item_norm):
            long_path, short_path = dest_norm, item_norm
        else:
            long_path, short_path = item_norm, dest_norm
        # One path strictly nested inside the other is a conflict.
        if long_path.startswith(short_path) and long_path[len(short_path)] == '/':
            target.fatal('"%s" could not exist with "%s" in testdata' % (dest, item))
def _parse_qyery_path_to(self):
    """Parse the `--path-to` command line argument into a set of target ids.

    NOTE(review): the method name misspells 'query'; renaming would break
    callers, so it is kept as-is.
    """
    if not self.__options.query_path_to:
        return set()
    result = set()
    requested = self.__options.query_path_to.split(',')
    for target_id in target.normalize(requested, self.__working_dir):
        if target_id not in self.__target_database:
            console.fatal(
                'Invalid argument: "--path_to=%s", target "%s" does not exist' % (
                    self.__options.query_path_to, target_id))
        result.add(target_id)
    return result
def try_parse_file(self, filename):
    """Load, checksum, and execute the configuration file if present.

    The raw bytes are folded into the running md5 so configuration
    changes can be detected across builds.
    """
    self.current_file_name = filename
    try:
        if os.path.exists(filename):
            console.info('Loading config file "%s"' % filename)
            with open(filename, 'rb') as f:
                content = f.read()
            self.__md5.update(content)
            exec_file_content(filename, content, _config_globals, None)
    except SystemExit:
        console.fatal('Parse error in config file %s' % filename)
    finally:
        self.current_file_name = ''
def _check_thrift_srcs_name(self, srcs):
    """Check that every thrift source file name ends with '.thrift'.

    Each offending file is reported once; exits fatally if any were found.
    """
    error = 0
    for src in srcs:
        base_name = os.path.basename(src)
        pos = base_name.rfind('.')
        if pos == -1:
            console.error('Invalid thrift file name %s' % src)
            error += 1
            # Fix: without this `continue`, a file with no extension fell
            # through to the suffix check and was reported and counted a
            # second time.
            continue
        file_suffix = base_name[pos + 1:]
        if file_suffix != 'thrift':
            console.error('Invalid thrift file name %s' % src)
            error += 1
    if error > 0:
        console.fatal('Invalid thrift file names found.')
def find_blade_root_dir(working_dir=None):
    """Find the directory that holds the BLADE_ROOT file.

    The blade root dir is the closest ancestor of working_dir (or of the
    current working directory when working_dir is None) that contains a
    file named BLADE_ROOT.  Exits fatally when no such directory exists.
    """
    blade_root = find_file_bottom_up('BLADE_ROOT', from_dir=working_dir)
    if not blade_root:
        console.fatal(
            "Can't find the file 'BLADE_ROOT' in this or any upper directory.\n"
            "Blade need this file as a placeholder to locate the root source directory "
            "(aka the directory where you #include start from).\n"
            "You should create it manually at the first time.")
    return os.path.dirname(blade_root)
def _get_cc_version(self):
    """Return the C/C++ compiler version string; fatal when it cannot
    be determined."""
    version = ''
    if 'gcc' in self.cc:
        returncode, stdout, stderr = ToolChain._execute(self.cc + ' -dumpversion')
        if returncode == 0:
            version = stdout.strip()
    elif 'clang' in self.cc:
        returncode, stdout, stderr = ToolChain._execute(self.cc + ' --version')
        if returncode == 0:
            first_line = stdout.splitlines()[0]
            marker = first_line.find('version')
            # Keep whatever follows 'version '; fall back to the whole line.
            if marker == -1:
                version = first_line
            else:
                version = first_line[marker + len('version') + 1:]
    if not version:
        console.fatal('Failed to obtain cc toolchain.')
    return version
def __init__(self, path):
    """Parse a thrift IDL file and record its top-level definitions."""
    # Path of the thrift IDL file; must exist.
    self.path = path
    if not os.path.isfile(self.path):
        console.fatal('"%s" is not a valid file.' % self.path)
    # File name without the trailing '.thrift' (7 characters).
    self.thrift_name = os.path.basename(self.path)[:-7]
    # Package name for each language.
    self.package_name = {}
    # Set to true if there is at least one const definition in the
    # thrift file.
    self.has_constants = False
    # Names of the top-level definitions collected during parsing.
    self.enums = []
    self.structs = []
    self.exceptions = []
    self.services = []
    # Parse the thrift IDL file.
    self._parse_file()
def _check_dep_visibility(target, dep, targets):
    """Check whether target is able to depend on dep.

    Fatal when dep does not exist or when dep's visibility excludes
    target; targets in the same BUILD file always see each other.
    """
    if dep not in targets:
        console.fatal(
            'Target %s:%s depends on %s:%s, but it is missing, exit...' % (
                target[0], target[1], dep[0], dep[1]))
    # Targets are visible inside the same BUILD file by default
    if target[0] == dep[0]:
        return
    d = targets[dep]
    visibility = getattr(d, 'visibility', 'PUBLIC')
    if visibility == 'PUBLIC' or target in visibility:
        return
    console.fatal(
        '%s:%s is not allowed to depend on %s because of visibility.' % (
            target[0], target[1], d.fullname))
def _check_plat_and_profile_options(self, options, targets):
    """Check platform and profile options; fill in options.arch/bits."""
    compiler_arch = self._compiler_target_arch()
    arch = BuildArchitecture.get_canonical_architecture(compiler_arch)
    if arch is None:
        console.fatal('Unknown architecture: %s' % compiler_arch)
    bits = options.m
    if not bits:
        # No -m flag: use the compiler's native architecture.
        options.arch = arch
        options.bits = BuildArchitecture.get_architecture_bits(arch)
        assert options.bits
    else:
        options.bits = bits
        options.arch = BuildArchitecture.get_model_architecture(arch, bits)
        if options.arch is None:
            console.fatal(
                '"-m%s" is not supported by the architecture %s' % (bits, compiler_arch))
def _check_srcs(self):
    """Check source files.

    Verifies that srcs contains no duplicates, that every path is a
    relative path inside the current directory, and that no file is
    claimed by two different targets (subject to the configured
    'duplicated_source_action').
    """
    dups = []
    srcset = set()
    for s in self.srcs:
        if s in srcset:
            dups.append(s)
        else:
            srcset.add(s)
    if dups:
        self.fatal('Duplicate source file paths: %s ' % dups)
    # Check if one file belongs to two different targets.
    action = config.get_item('global_config', 'duplicated_source_action')
    for s in self.srcs:
        if '..' in s or s.startswith('/'):
            self.fatal(
                'Invalid source file path: %s. can only be relative path, and must '
                'in current directory or subdirectories.' % s)
        src = os.path.normpath(os.path.join(self.path, s))
        # (fullname, allows-duplicates) pair recorded for this source.
        target = self.fullname, self._allow_duplicate_source()
        if src not in Target.__src_target_map:
            Target.__src_target_map[src] = target
        else:
            target_existed = Target.__src_target_map[src]
            if target_existed != target:
                # Always preserve the target which disallows
                # duplicate source files in the map
                if target_existed[1]:
                    Target.__src_target_map[src] = target
                elif target[1]:
                    pass
                else:
                    message = 'Source file %s belongs to {%s, %s}' % (
                        s, target_existed[0], target[0])
                    if action == 'error':
                        console.fatal(message)
                    elif action == 'warning':
                        console.warning(message)
def _load_build_file(source_dir, processed_source_dirs, blade):
    """Load the BUILD and place the targets into database.

    Invoked by _load_targets.  Load and execute the BUILD file, which is
    a Python script, in source_dir.  Statements in BUILD depend on the
    global variable current_source_dir, and will register build
    targets/rules into the global target_database.  Reports an error and
    exits if path/BUILD does NOT exist.

    The parameter processed_source_dirs refers to a set defined in the
    caller and used to avoid duplicated execution of BUILD files.
    """
    source_dir = os.path.normpath(source_dir)
    # TODO(yiwang): the character '#' is a magic value.
    if source_dir in processed_source_dirs or source_dir == '#':
        return
    processed_source_dirs.add(source_dir)
    if not os.path.exists(source_dir):
        _report_not_exist('Directory', source_dir, source_dir, blade)
        return
    # Remember and restore the current source path around execution.
    old_current_source_path = blade.get_current_source_path()
    try:
        blade.set_current_source_path(source_dir)
        build_file = os.path.join(source_dir, 'BUILD')
        if os.path.exists(build_file) and not os.path.isdir(build_file):
            try:
                # The magic here is that a BUILD file is a Python script,
                # which can be loaded and executed by execfile().
                # NOTE(review): '__current_globles' looks like a misspelling
                # of '__current_globals'; renaming requires checking the
                # rest of the module for other readers — TODO confirm.
                global __current_globles
                __current_globles = build_rules.get_all()
                exec_file(build_file, __current_globles, None)
            except SystemExit:
                console.fatal('%s: Fatal error' % build_file)
            except:  # pylint: disable=bare-except
                console.fatal('Parse error in %s\n%s' % (
                    build_file, traceback.format_exc()))
        else:
            _report_not_exist('File', build_file, source_dir, blade)
    finally:
        blade.set_current_source_path(old_current_source_path)
def _parse_file(self):
    """Parse the thrift IDL file and collect its top-level definitions.

    Fills in package_name (per-language namespaces), has_constants, and
    the enums/structs/exceptions/services name lists.  Exits fatally when
    the file contains no definitions at all.
    """
    # Fix: open the file in a `with` block so the handle is always closed
    # (the old code passed an open() result straight to the for loop and
    # leaked the handle until garbage collection).
    with open(self.path) as thrift_file:
        for line in thrift_file:
            line = line.strip()
            # Skip whole-line comments.
            if line.startswith('//') or line.startswith('#'):
                continue
            # Strip trailing comments.
            pos = line.find('//')
            if pos != -1:
                line = line[:pos]
            pos = line.find('#')
            if pos != -1:
                line = line[:pos]
            matched = re.match('^namespace ([0-9_a-zA-Z]+) ([0-9_a-zA-Z.]+)', line)
            if matched:
                lang, package = matched.groups()
                self.package_name[lang] = package
                continue
            matched = re.match(
                '(const|struct|service|enum|exception) ([0-9_a-zA-Z]+)', line)
            if not matched:
                continue
            kw, name = matched.groups()
            if kw == 'const':
                self.has_constants = True
            elif kw == 'struct':
                self.structs.append(name)
            elif kw == 'service':
                self.services.append(name)
            elif kw == 'enum':
                self.enums.append(name)
            elif kw == 'exception':
                self.exceptions.append(name)
    # Fix: state the emptiness check directly instead of an empty
    # if-branch followed by `pass`/`else`.
    if not (self.has_constants or self.structs or self.enums or
            self.exceptions or self.services):
        console.fatal('%s is an empty thrift file.' % self.path)