def _find_all_deps(self, target_id, root_target_id=None):
    """_find_all_deps.

    Return all targets that target_id depends on, directly or
    indirectly. The parameter root_target_id is used to detect
    circular dependencies.

    """
    if root_target_id is None:
        root_target_id = target_id

    new_deps_list = self.deps_map_cache.get(target_id, [])
    if new_deps_list:
        return new_deps_list

    for d in self.targets[target_id]['deps']:
        if d == root_target_id:
            print "circular dependency of %s" % ':'.join(root_target_id)
        new_deps_piece = [d]
        if d not in self.targets:
            error_exit('Target %s:%s depends on %s:%s, '
                       'but it is missing, exit...' % (
                           target_id[0], target_id[1], d[0], d[1]))
        new_deps_piece += self._find_all_deps(d, root_target_id)
        # Append new_deps_piece to new_deps_list, be aware of
        # de-duplication:
        for nd in new_deps_piece:
            if nd in new_deps_list:
                new_deps_list.remove(nd)
            new_deps_list.append(nd)

    self.deps_map_cache[target_id] = new_deps_list
    return new_deps_list
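# Illustration only (hypothetical, not part of Blade): a minimal sketch of
# the same depth-first flattening with order-preserving de-duplication that
# _find_all_deps performs, using a plain dict instead of self.targets and
# omitting the result cache and the circular-dependency check.
def toy_find_all_deps(deps_of, target_id):
    result = []
    for d in deps_of.get(target_id, []):
        piece = [d] + toy_find_all_deps(deps_of, d)
        for nd in piece:
            if nd in result:
                # keep only the last occurrence, so deeper deps sort later
                result.remove(nd)
            result.append(nd)
    return result

# toy_find_all_deps({'a': ['b', 'c'], 'b': ['c'], 'c': []}, 'a') -> ['b', 'c']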
def run_target(self, target_key):
    """Run one single target. """
    target = self.targets.get(target_key, {})
    if not target:
        error_exit("target %s:%s is not in the target database" % (
            target_key[0], target_key[1]))
    if target['type'] not in self.run_list:
        error_exit("target %s:%s is not a target that could run" % (
            target_key[0], target_key[1]))
    self._prepare_run_env(target)
    old_pwd = get_cwd()
    cmd = "%s " % os.path.abspath(self._test_executable(target))
    if self.options.runargs:
        cmd += "%s" % self.options.runargs
    info("it will run '%s'" % cmd)
    sys.stdout.flush()

    target_dir = os.path.dirname(self._test_executable(target))
    os.chdir(target_dir)
    run_env = dict(os.environ)
    run_env['LD_LIBRARY_PATH'] = target_dir
    p = subprocess.Popen(cmd, env=run_env, shell=True)
    p.wait()
    os.chdir(old_pwd)
    return p.returncode
def _generate_target_explict_dependency(self, target_files):
    """_generate_target_explict_dependency.

    Description
    -----------
    Generates an explicit dependency between two targets that have no
    build-level dependency but need one, as specified by the user in
    the BUILD file.

    1. gen_rule targets should be depended on by other targets

    """
    if not target_files:
        return
    env_name = self._env_name()
    files = var_to_list(target_files)
    files_str = ",".join(["%s" % f for f in files])
    if not self.blade.get_expanded():
        error_exit("logic error in Blade, error in "
                   "_generate_target_explict_dependency")
    targets = self.blade.get_all_targets_expanded()
    files_map = self.blade.get_gen_rule_files_map()
    deps = targets[self.key]['deps']
    for d in deps:
        dep_target = targets[d]
        if dep_target['type'] == 'gen_rule':
            srcs_list = files_map[(dep_target['path'], dep_target['name'])]
            if srcs_list:
                self._write_rule("%s.Depends([%s], [%s])" % (
                    env_name, files_str, srcs_list))
def _check_deps_in_build_file(self, name, deps):
    """_check_deps_in_build_file.

    Parameters
    -----------
    name: the target's name
    deps: the deps list in BUILD file

    Returns
    -----------
    None

    Description
    -----------
    Checks whether the user's BUILD file is consistent with blade's
    dep rules.

    """
    for dep in deps:
        if not (dep.startswith(':') or dep.startswith('#') or
                dep.startswith('//') or dep.startswith('./')):
            error_exit('%s/%s: Invalid dep %s.' % (
                self.current_source_path, name, dep))
        if dep.count(':') > 1:
            error_exit('%s/%s: Invalid dep %s, missing \',\' between '
                       '2 deps?' % (self.current_source_path, name, dep))
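# Illustration only (hypothetical helper, not in Blade): the same dep-syntax
# rule expressed as a predicate, to make the accepted forms explicit.
def _dep_looks_valid(dep):
    # a dep must name a local target (':x'), a system lib ('#x'),
    # a root-relative path ('//a/b:x') or a relative path ('./a:x'),
    # and may contain at most one ':'
    return (dep.startswith((':', '#', '//', './')) and
            dep.count(':') <= 1)

assert _dep_looks_valid(':local_lib')
assert _dep_looks_valid('//common/base:string')
assert _dep_looks_valid('#pthread')
assert not _dep_looks_valid('common/base:string')  # missing '//' or ':'
assert not _dep_looks_valid(':a:b')                # missing ',' between deps?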
def cc_test_config(**kwargs):
    """cc_test_config section. """
    heap_check = kwargs.get('heap_check', '')
    if heap_check and heap_check not in HEAP_CHECK_VALUES:
        error_exit('cc_test_config: heap_check can only be in %s' %
                   HEAP_CHECK_VALUES)
    global blade_config
    blade_config.update_config('cc_test_config', kwargs)
def _report_not_exist(source_dir, path, blade):
    """Report that a dir or BUILD file does not exist. """
    depender = _find_dir_depender(source_dir, blade)
    if depender:
        error_exit('//%s not found, required by %s, exit...' % (path, depender))
    else:
        error_exit('//%s not found, exit...' % path)
def scons_rules(self):
    """scons_rules.

    Parameters
    -----------
    None

    Returns
    -----------
    None

    Description
    -----------
    It outputs the scons rules according to user options.

    """
    self._clone_env()
    if self.data['type'] == 'prebuilt_py_binary':
        return
    env_name = self._env_name()

    setup_file = os.path.join(self.data['path'], "setup.py")
    python_package = os.path.join(self.data['path'], self.data['name'])
    init_file = os.path.join(python_package, '__init__.py')

    binary_files = []
    if os.path.exists(setup_file):
        binary_files.append(setup_file)
    if not os.path.exists(init_file):
        error_exit("missing __init__.py in %s" % python_package)
    binary_files.append(init_file)

    dep_var_list = []
    self.targets = self.blade.get_all_targets_expanded()
    for dep in self.targets[self.key]['deps']:
        if dep in self.blade.python_binary_dep_source_map:
            for f in self.blade.python_binary_dep_source_map[dep]:
                binary_files.append(f)
            for cmd in self.blade.python_binary_dep_source_cmd[dep]:
                dep_var_list.append(cmd)

    target_egg_file = "%s.egg" % self._target_file_path()
    python_binary_var = "%s_python_binary_var" % (
        self._generate_variable_name(self.data['path'], self.data['name']))
    self._write_rule("%s = %s.PythonBinary(['%s'], %s)" % (
        python_binary_var, env_name, target_egg_file, binary_files))
    for var in dep_var_list:
        self._write_rule("%s.Depends(%s, %s)" % (
            env_name, python_binary_var, var))
def register_scons_target(self, target_key, scons_target):
    """Register scons targets into the scons targets map.

    It is used for quick lookup.

    """
    # check whether there is already such a key in the database
    if target_key in self.scons_targets_map:
        error_exit("target name %s is duplicated in //%s/BUILD" % (
            target_key[1], target_key[0]))
    self.scons_targets_map[target_key] = scons_target
def main(blade_path):
    exit_code = 0
    try:
        exit_code = _main(blade_path)
    except SystemExit as e:
        exit_code = e.code
    except KeyboardInterrupt:
        error_exit("keyboard interrupted", -signal.SIGINT)
    except:
        error_exit(traceback.format_exc())
    sys.exit(exit_code)
def _check_color_options(self):
    """check color options. """
    if self.options.color == "yes":
        self.options.color = True
    elif self.options.color == "no":
        self.options.color = False
    elif self.options.color == "auto" or self.options.color is None:
        self.options.color = (sys.stdout.isatty() and
                              os.environ.get('TERM') not in ('emacs', 'dumb'))
    else:
        error_exit("--color can only be yes, no or auto.")
    blade_util.color_enabled = self.options.color
def _load_build_file(source_dir, action_if_fail, processed_source_dirs, blade):
    """_load_build_file to load the BUILD and place the targets into database.

    Invoked by _load_targets.  Load and execute the BUILD file, which is a
    Python script, in source_dir.  Statements in BUILD depend on the global
    variable current_source_dir, and register build targets/rules into the
    global variable target_database.  If path/BUILD does NOT exist, take the
    action corresponding to action_if_fail.  The parameter
    processed_source_dirs refers to a set defined in the caller and used to
    avoid duplicated execution of BUILD files.

    """
    # Initialize build_target the first time; it is used by BUILD files
    # loaded by execfile
    global build_target
    if build_target is None:
        options = blade.get_options()
        if options.m == '32':
            arch = 'i386'
        elif options.m == '64':
            arch = 'x86_64'
        build_target = TargetAttributes(arch, int(options.m))

    source_dir = os.path.normpath(source_dir)
    # TODO(yiwang): the character '#' is a magic value.
    if source_dir in processed_source_dirs or source_dir == '#':
        return
    processed_source_dirs.add(source_dir)

    if not os.path.exists(source_dir):
        _report_not_exist(source_dir, source_dir, blade)

    old_path_reserved = blade.get_current_source_path()
    blade.set_current_source_path(source_dir)
    build_file = os.path.join(source_dir, 'BUILD')
    if os.path.exists(build_file):
        try:
            # The magic here is that a BUILD file is a Python script,
            # which can be loaded and executed by execfile().
            execfile(build_file, globals(), None)
        except SystemExit:
            error_exit("%s: fatal error, exit..." % build_file)
        except:
            error_exit('Parse error in %s, exit...\n%s' % (
                build_file, traceback.format_exc()))
    else:
        if action_if_fail == ABORT_IF_FAIL:
            _report_not_exist(source_dir, build_file, blade)

    blade.set_current_source_path(old_path_reserved)
def parse(self):
    """load the configuration file and parse. """
    try:
        bladerc_file = os.path.expanduser("~/.bladerc")
        if os.path.exists(bladerc_file):
            execfile(bladerc_file)
    except:
        error_exit("Parse error in config file bladerc, exit...\n%s" %
                   traceback.format_exc())

    try:
        execfile(os.path.join(self.current_source_dir, 'BLADE_ROOT'))
    except:
        error_exit("Parse error in config file BLADE_ROOT, exit...\n%s" %
                   traceback.format_exc())
def generate_resource_file(target, source, env):
    src_path = str(source[0])
    new_src_path = str(target[0])
    cmd = "xxd -i %s | sed 's/unsigned char /const char RESOURCE_/g' > %s" % (
        src_path, new_src_path)
    p = subprocess.Popen(cmd,
                         env={},
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate resource file")
    return p.returncode
def _check_run_targets(self):
    """check that run command should have only one target. """
    err = False
    targets = []
    if len(self.targets) == 0:
        err = True
    elif self.targets[0].find(':') == -1:
        err = True
    if err:
        error_exit("Please specify a single target to run: "
                   "blade run //target_path:target_name (or "
                   "a_path:target_name)")
    if self.options.command == 'run' and len(self.targets) > 1:
        warning("run command will only take one target to build and run")
    if self.targets[0].startswith("//"):
        targets.append(self.targets[0][2:])
    else:
        targets.append(self.targets[0])
    self.targets = targets
def __init__(self):
    """Init the class. """
    (self.options, self.targets) = self._cmd_parse()
    for t in self.targets:
        if t.startswith('-'):
            error_exit("unrecognized option %s, use blade [action] "
                       "--help to get all the options" % t)

    command = self.options.command

    # Check the options with different sub commands
    actions = {
        'build': self._build_action,
        'run': self._run_action,
        'test': self._test_action,
        'clean': self._clean_action,
        'query': self._query_action
    }[command]()
def _check_query_targets(self):
    """check query targets, should have at least one target. """
    err = False
    targets = []
    if len(self.targets) == 0:
        err = True
    for target in self.targets:
        if target.find(':') == -1:
            err = True
            break
        if target.startswith("//"):
            targets.append(target[2:])
        else:
            targets.append(target)
    if err:
        error_exit("Please specify targets in this way: "
                   "blade query //target_path:target_name (or "
                   "a_path:target_name)")
    self.targets = targets
def _convert_string_to_target_helper(self, target_string):
    """Convert a string like thirdparty/gtest:gtest to the tuple
    (target_path, target_name).

    """
    bad_format = False
    if target_string:
        if target_string.startswith('#'):
            return ("#", target_string[1:])
        elif target_string.find(":") != -1:
            path, name = target_string.split(":")
            return (path.strip(), name.strip())
        else:
            bad_format = True
    else:
        bad_format = True

    if bad_format:
        error_exit("invalid target lib format: %s, "
                   "should be #lib_name or lib_path:lib_name" %
                   target_string)
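# Illustration only (hypothetical helper, not in Blade): the same conversion
# rule as above, returning None for a bad format instead of exiting.
def parse_target_string(target_string):
    if not target_string:
        return None
    if target_string.startswith('#'):
        return ('#', target_string[1:])
    if target_string.count(':') == 1:
        path, name = target_string.split(':')
        return (path.strip(), name.strip())
    return None

# parse_target_string('thirdparty/gtest:gtest') -> ('thirdparty/gtest', 'gtest')
# parse_target_string('#pthread')               -> ('#', 'pthread')
# parse_target_string('thirdparty/gtest')       -> None (bad format)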
def find_blade_root_dir(working_dir):
    """find_blade_root_dir to find the dir that holds the BLADE_ROOT file.

    The blade_root_dir is the closest upper-level directory of the current
    working directory that contains a file named BLADE_ROOT.

    """
    blade_root_dir = working_dir
    if blade_root_dir.endswith('/'):
        blade_root_dir = blade_root_dir[:-1]
    while blade_root_dir and blade_root_dir != "/":
        if os.path.isfile(os.path.join(blade_root_dir, "BLADE_ROOT")):
            break
        blade_root_dir = os.path.dirname(blade_root_dir)
    if not blade_root_dir or blade_root_dir == "/":
        error_exit("Can't find the file 'BLADE_ROOT' in this or any upper "
                   "directory.\n"
                   "Blade needs this file as a placeholder to locate the "
                   "root source directory (aka the directory where your "
                   "#include paths start from).\n"
                   "You should create it manually the first time.")
    return blade_root_dir
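# Illustration only (hypothetical name, not in Blade): the same upward walk
# without the error handling, to show the stop condition at '/'.
import os

def walk_up_for(filename, start_dir):
    d = os.path.normpath(start_dir)
    while d and d != '/':
        if os.path.isfile(os.path.join(d, filename)):
            return d
        d = os.path.dirname(d)
    return None

# walk_up_for('BLADE_ROOT', '/home/alice/project/src') returns the first
# ancestor directory that contains a BLADE_ROOT file, or None if none is
# found up to '/'.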
def _prepare_test_env(self, target):
    """Prepare the test environment. """
    shutil.rmtree(self._runfiles_dir(target), ignore_errors=True)
    os.mkdir(self._runfiles_dir(target))
    # add build profile symlink
    profile_link_name = os.path.basename(self.build_dir)
    os.symlink(os.path.abspath(self.build_dir),
               os.path.join(self._runfiles_dir(target), profile_link_name))

    # add prebuilt library symlinks
    for prebuilt_file in self._get_prebuilt_files(target):
        os.symlink(os.path.abspath(prebuilt_file[0]),
                   os.path.join(self._runfiles_dir(target),
                                prebuilt_file[1]))

    link_name_list = []
    for i in target['options']['testdata']:
        if isinstance(i, tuple):
            data_target = i[0]
            link_name = i[1]
        else:
            data_target = link_name = i
        if '..' in data_target:
            continue
        if link_name.startswith('//'):
            link_name = link_name[2:]
        err_msg, item = self.__check_link_name(link_name, link_name_list)
        if err_msg == "AMBIGUOUS":
            error_exit("Ambiguous testdata of //%s:%s: %s, exit..." % (
                target['path'], target['name'], link_name))
        elif err_msg == "INCOMPATIBLE":
            error_exit("%s cannot coexist with %s in testdata of //%s:%s" % (
                link_name, item, target['path'], target['name']))
        link_name_list.append(link_name)
        try:
            os.makedirs(os.path.dirname('%s/%s' % (
                self._runfiles_dir(target), link_name)))
        except os.error:
            pass
        if os.path.exists(os.path.abspath('%s/%s' % (
                self._runfiles_dir(target), link_name))):
            error_exit("%s already exists, could not prepare testdata for "
                       "//%s:%s" % (link_name, target['path'],
                                    target['name']))
        if data_target.startswith('//'):
            warning("Test data is not in the same directory as the BUILD file")
            data_target = data_target[2:]
            os.symlink(os.path.abspath(data_target),
                       '%s/%s' % (self._runfiles_dir(target), link_name))
        else:
            os.symlink(os.path.abspath("%s/%s" % (target['path'],
                                                  data_target)),
                       '%s/%s' % (self._runfiles_dir(target), link_name))
def _check_plat_and_profile_options(self):
    """check platform and profile options. """
    if (self.options.profile != 'debug' and
            self.options.profile != 'release'):
        error_exit("--profile must be 'debug' or 'release'.")

    if self.options.m is None:
        self.options.m = self._arch_bits()
    else:
        if not (self.options.m == "32" or self.options.m == "64"):
            error_exit("--m must be '32' or '64'")

        # TODO(phongchen): cross compile checking
        if self.options.m == "64" and platform.machine() != "x86_64":
            error_exit("Sorry, 64-bit environment is required for "
                       "building 64-bit targets.")
def parse(self):
    """load the configuration file and parse. """
    try:
        blade_conf = os.path.join(os.path.dirname(sys.argv[0]), "blade.conf")
        if os.path.exists(blade_conf):
            execfile(blade_conf)
    except:
        error_exit("Parse error in config file blade.conf, exit...\n%s" %
                   traceback.format_exc())

    try:
        bladerc_file = os.path.expanduser("~/.bladerc")
        if os.path.exists(bladerc_file):
            execfile(bladerc_file)
    except:
        error_exit("Parse error in config file bladerc, exit...\n%s" %
                   traceback.format_exc())

    try:
        execfile(os.path.join(self.current_source_dir, 'BLADE_ROOT'))
    except:
        error_exit("Parse error in config file BLADE_ROOT, exit...\n%s" %
                   traceback.format_exc())
def _main(blade_path):
    """The main entry of blade. """
    cmd_options = CmdArguments()

    command = cmd_options.get_command()
    targets = cmd_options.get_targets()

    global query_targets
    global run_target
    if command == 'query':
        query_targets = list(targets)
    if command == 'run':
        run_target = targets[0]

    if not targets:
        targets = ['.']
    options = cmd_options.get_options()

    # Set current_source_dir to the directory which contains the
    # file BLADE_ROOT, is upper than and is closest to the current
    # directory.  Set working_dir to the current directory.
    working_dir = get_cwd()
    current_source_dir = find_blade_root_dir(working_dir)
    os.chdir(current_source_dir)
    if current_source_dir != working_dir:
        # This message is required by vim quickfix mode if pwd is changed
        # during the building, DO NOT change the pattern of this message.
        print "Blade: Entering directory `%s'" % current_source_dir

    # Init global configuration manager
    configparse.blade_config = BladeConfig(current_source_dir)
    configparse.blade_config.parse()

    # Init global blade manager.
    current_building_path = "build%s_%s" % (options.m, options.profile)

    lock_file_fd = None
    locked_scons = False
    try:
        lock_file_fd = open('.SConstruct.lock', 'w')
        old_fd_flags = fcntl.fcntl(lock_file_fd.fileno(), fcntl.F_GETFD)
        fcntl.fcntl(lock_file_fd.fileno(), fcntl.F_SETFD,
                    old_fd_flags | fcntl.FD_CLOEXEC)
        (locked_scons, ret_code) = lock_file(lock_file_fd.fileno(),
                                             fcntl.LOCK_EX | fcntl.LOCK_NB)
        if not locked_scons:
            if ret_code == errno.EAGAIN:
                error_exit("There is already an active build in the current "
                           "source dir tree,\n"
                           "or make sure there is no SConstruct file "
                           "existing alongside BLADE_ROOT. Blade will "
                           "exit...")
            else:
                error_exit("Lock exception, please try again later.")

        if command == 'query' and (
                hasattr(options, 'depended') and options.depended):
            targets = ['...']
        blade.blade = Blade(targets,
                            blade_path,
                            working_dir,
                            current_building_path,
                            current_source_dir,
                            options,
                            blade_command=command)

        # Build the targets
        blade.blade.generate()

        # Flush the printing
        sys.stdout.flush()
        sys.stderr.flush()

        # Tune the jobs num
        if command in ['build', 'run', 'test']:
            options.jobs = blade.blade.tune_parallel_jobs_num()

        # Switch case due to different sub commands
        action = {
            'build': build,
            'run': run,
            'test': test,
            'clean': clean,
            'query': query
        }[command](options)
        return action
    finally:
        if (hasattr(options, 'scons_only') and not options.scons_only) or (
                command == 'clean' or command == 'query'):
            try:
                if locked_scons:
                    os.remove(os.path.join(current_source_dir, 'SConstruct'))
                    unlock_file(lock_file_fd.fileno())
                lock_file_fd.close()
            except Exception:
                pass
    return 0
def _check_name(self):
    if '/' in self.data.get('name', ''):
        error_exit('//%s:%s: Invalid target name, should not contain dir '
                   'part.' % (self.data['path'], self.data['name']))
def _prepare_to_generate_rule(self):
    """Should be overridden. """
    error_exit("_prepare_to_generate_rule should be overridden in subclasses")
def _clone_env(self):
    """Clone target's environment. """
    error_exit("_clone_env should be overridden in subclasses")
def generate_python_binary(target, source, env):
    setup_file = ''
    if not str(source[0]).endswith("setup.py"):
        warning("setup.py does not exist for target %s, "
                "blade will generate a default one for you" %
                str(target[0]))
    else:
        setup_file = str(source[0])
    init_file = ''
    source_index = 2
    if not setup_file:
        source_index = 1
        init_file = str(source[0])
    else:
        init_file = str(source[1])

    init_file_dir = os.path.dirname(init_file)

    dep_source_list = []
    for s in source[source_index:]:
        dep_source_list.append(str(s))

    target_file = str(target[0])
    target_file_dir_list = target_file.split('/')
    target_profile = target_file_dir_list[0]
    target_dir = '/'.join(target_file_dir_list[0:-1])

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    if setup_file:
        shutil.copyfile(setup_file, os.path.join(target_dir, 'setup.py'))
    else:
        target_name = os.path.basename(init_file_dir)
        if not target_name:
            error_exit("invalid package for target %s" % str(target[0]))
        # generate a default setup.py for the user
        setup_str = """#!/usr/bin/env python
# This file was generated by blade

from setuptools import find_packages, setup


setup(
    name='%s',
    version='0.1.0',
    packages=find_packages(),
    zip_safe=True
)
""" % target_name
        default_setup_file = open(os.path.join(target_dir, 'setup.py'), "w")
        default_setup_file.write(setup_str)
        default_setup_file.close()

    package_dir = os.path.join(target_profile, init_file_dir)
    if os.path.exists(package_dir):
        shutil.rmtree(package_dir, ignore_errors=True)

    cmd = "cp -r %s %s" % (init_file_dir, target_dir)
    p = subprocess.Popen(cmd,
                         env={},
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to copy source files from %s to %s" % (
            init_file_dir, target_dir))
        return p.returncode

    # copy the dependent source files into package_dir
    for f in dep_source_list:
        dep_file_basename = os.path.basename(f)
        dep_file_dir = os.path.dirname(f)
        sub_dir = ''
        sub_dir_list = dep_file_dir.split('/')
        if len(sub_dir_list) > 1:
            sub_dir = '/'.join(dep_file_dir.split('/')[1:])
        if sub_dir:
            package_sub_dir = os.path.join(package_dir, sub_dir)
            if not os.path.exists(package_sub_dir):
                os.makedirs(package_sub_dir)
            sub_init_file = os.path.join(package_sub_dir, "__init__.py")
            if not os.path.exists(sub_init_file):
                sub_f = open(sub_init_file, "w")
                sub_f.close()
            shutil.copyfile(f, os.path.join(package_sub_dir,
                                            dep_file_basename))

    make_egg_cmd = "python setup.py bdist_egg"
    p = subprocess.Popen(make_egg_cmd,
                         env={},
                         cwd=target_dir,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate python binary in %s" % target_dir)
        return p.returncode
    return 0
def _check_query_options(self):
    """check query action options. """
    if not self.options.deps and not self.options.depended:
        error_exit("please specify --deps, --depended or both to query target")
def load_targets(target_ids, working_dir, blade_root_dir, blade):
    """load_targets.

    Parse and load targets, including those specified in the command line
    and their direct and indirect dependencies, by loading the related
    BUILD files.  Returns a map which contains all these targets.

    """
    target_database = blade.get_target_database()

    # targets specified in command line
    cited_targets = set()
    # cited_targets and all their dependencies
    related_targets = {}
    # source dirs mentioned in command line
    source_dirs = []
    # to prevent duplicated loading of BUILD files
    processed_source_dirs = set()

    direct_targets = []
    all_command_targets = []
    # Parse command line target_ids.  For those in the form of
    # <path>:<target>, record (<path>, <target>) in cited_targets; for the
    # rest (with <path> but without <target>), record <path> into paths.
    for target_id in target_ids:
        if target_id.find(':') == -1:
            source_dir, target_name = target_id, '*'
        else:
            source_dir, target_name = target_id.rsplit(':', 1)

        source_dir = relative_path(os.path.join(working_dir, source_dir),
                                   blade_root_dir)

        if target_name != '*' and target_name != '':
            cited_targets.add((source_dir, target_name))
        elif source_dir.endswith('...'):
            source_dir = source_dir[:-3]
            if not source_dir:
                source_dir = "./"
            source_dirs.append((source_dir, WARN_IF_FAIL))
            for root, dirs, files in os.walk(source_dir):
                # Skip over subdirs starting with '.', e.g., .svn.
                dirs[:] = [d for d in dirs if not d.startswith('.')]
                for d in dirs:
                    source_dirs.append((os.path.join(root, d),
                                        IGNORE_IF_FAIL))
        else:
            source_dirs.append((source_dir, ABORT_IF_FAIL))

    direct_targets = list(cited_targets)

    # Load BUILD files in paths, and add all loaded targets into
    # cited_targets.  Together with the above step, we can ensure that all
    # targets mentioned in the command line are now in cited_targets.
    for source_dir, action_if_fail in source_dirs:
        _load_build_file(source_dir,
                         action_if_fail,
                         processed_source_dirs,
                         blade)

    for key in target_database:
        cited_targets.add(key)
    all_command_targets = list(cited_targets)

    # Starting from the targets specified in the command line,
    # breadth-first propagate to load BUILD files containing directly and
    # indirectly dependent targets.  All these targets form
    # related_targets, which is a subset of target_database created by
    # loading BUILD files.
    while cited_targets:
        source_dir, target_name = cited_targets.pop()
        target_id = (source_dir, target_name)
        if target_id in related_targets:
            continue

        _load_build_file(source_dir,
                         ABORT_IF_FAIL,
                         processed_source_dirs,
                         blade)

        if target_id not in target_database:
            error_exit("%s: target //%s:%s does not exist" % (
                _find_depender(target_id, blade), source_dir, target_name))

        related_targets[target_id] = target_database[target_id]
        for key in related_targets[target_id]['deps']:
            if key not in related_targets:
                cited_targets.add(key)

    # Iterate to collect the svn root dirs
    for path, name in related_targets:
        root_dir = path.split("/")[0].strip()
        if root_dir not in blade.svn_root_dirs and '#' not in root_dir:
            blade.svn_root_dirs.append(root_dir)

    blade.set_related_targets(related_targets)

    return direct_targets, all_command_targets
def __init__(self, targets, options,
             prebuilt_file_map=None, target_database=None):
    """Init method. """
    self.targets = targets
    self.build_dir = "build%s_%s" % (options.m, options.profile)
    self.options = options
    self.run_list = ['cc_binary',
                     'dynamic_cc_binary',
                     'cc_test',
                     'dynamic_cc_test']
    # Avoid mutable default arguments, which would be shared across instances.
    if prebuilt_file_map is None:
        prebuilt_file_map = {}
    if target_database is None:
        target_database = {}
    self.prebuilt_file_map = prebuilt_file_map
    self.target_database = target_database
    self.inctest_md5_file = ".blade.test.stamp"
    self.tests_detail_file = "./blade_tests_detail"
    self.run_all = False
    self.inctest_run_list = []
    self.testarg_dict = {}
    self.env_dict = {}
    self.cur_testarg_dict = {}
    self.cur_env_dict = {}
    self.inctest_md5_buffer = []
    self.target_dict = {}
    self.cur_target_dict = {}
    self.option_has_fulltest = False
    self.valid_inctest_time_interval = 86400
    self.last_inctest_time_dict = {}
    self.this_inctest_time_dict = {}
    self.tests_run_map = {}
    self.run_all_reason = ''
    self.title_str = '=' * 13
    self.skipped_tests = []
    if hasattr(self.options, 'fulltest'):
        self.option_has_fulltest = True

    if self.option_has_fulltest and (not self.options.fulltest):
        if os.path.exists(self.inctest_md5_file):
            for line in open(self.inctest_md5_file):
                self.inctest_md5_buffer.append(line[:-1])
        buf_len = len(self.inctest_md5_buffer)
        if 0 < buf_len < 2:
            if os.path.exists(self.inctest_md5_file):
                os.remove(self.inctest_md5_file)
            error_exit("bad incremental test md5 file, removed")
        if self.inctest_md5_buffer:
            self.testarg_dict = eval(self.inctest_md5_buffer[0])
            self.env_dict = eval(self.inctest_md5_buffer[1])
        if buf_len >= 3:
            self.target_dict = eval(self.inctest_md5_buffer[2])
        if buf_len >= 4:
            self.last_inctest_time_dict = eval(self.inctest_md5_buffer[3])

    if hasattr(self.options, 'testargs'):
        self.cur_testarg_dict['testarg'] = md5sum(self.options.testargs)
    else:
        self.cur_testarg_dict['testarg'] = None

    env_keys = os.environ.keys()
    env_keys = list(set(env_keys).difference(env_ignore_set))
    env_keys.sort()
    env_dict = {}
    for env_key in env_keys:
        env_dict[env_key] = os.environ[env_key]
    self.cur_env_dict['env'] = env_dict

    self.this_inctest_time_dict['inctest_time'] = time.time()

    if self.option_has_fulltest and (not self.options.fulltest):
        if self.cur_testarg_dict['testarg'] != (
                self.testarg_dict.get('testarg', None)):
            self.run_all = True
            self.run_all_reason = 'ARGUMENT'
            info("all tests will run due to test arguments changed")

        new_env = self.cur_env_dict['env']
        old_env = self.env_dict.get('env', {})
        if isinstance(old_env, str):  # For old test record
            old_env = {}
        if new_env != old_env:
            self.run_all = True
            self.run_all_reason = 'ENVIRONMENT'
            (new, old) = _diff_env(new_env, old_env)
            info("all tests will run due to test environments changed:")
            if new:
                info("new environments: %s" % new)
            if old:
                info("old environments: %s" % old)

        this_time = int(round(self.this_inctest_time_dict['inctest_time']))
        last_time = int(round(self.last_inctest_time_dict.get(
            'inctest_time', 0)))
        interval = this_time - last_time
        if interval >= self.valid_inctest_time_interval or interval < 0:
            self.run_all = True
            self.run_all_reason = 'STALE'
            info("all tests will run because the previously passed tests "
                 "are no longer valid")

    if self.option_has_fulltest and self.options.fulltest:
        self.run_all = True
        self.run_all_reason = 'FULLTEST'