def __init__(self, name, srcs, deps, type, out, blade, kwargs):
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    Target.__init__(self, name, 'package', [], deps, None, blade, kwargs)
    if type not in _package_types:
        console.error_exit('%s: Invalid type %s. Types supported '
                           'by the package are %s' % (
                           self.fullname, type, ', '.join(sorted(_package_types))))
    self.data['type'] = type
    self.data['sources'], self.data['locations'] = [], []
    self._process_srcs(srcs)
    if not out:
        out = '%s.%s' % (name, type)
    self.data['out'] = out
def _check_deps_in_build_file(self, name, deps):
    """_check_deps_in_build_file.

    Parameters
    -----------
    name: the target's name
    deps: the deps list in BUILD file

    Returns
    -----------
    None

    Description
    -----------
    Check whether the user's BUILD file is consistent with blade's rules.

    """
    for dep in deps:
        if not (dep.startswith(":") or dep.startswith("#") or
                dep.startswith("//") or dep.startswith("./")):
            console.error_exit("%s/%s: Invalid dep in %s." % (
                self.current_source_path, name, dep))
        if dep.count(":") > 1:
            console.error_exit(
                "%s/%s: Invalid dep %s, missing ',' between 2 deps?" % (
                    self.current_source_path, name, dep))
def _report_not_exist(source_dir, path, blade):
    """Report dir or BUILD file does not exist. """
    depender = _find_dir_depender(source_dir, blade)
    if depender:
        console.error_exit('//%s not found, required by %s' % (path, depender))
    else:
        console.error_exit('//%s not found' % path)
def __init__(self, name, srcs, deps, main, base, kwargs):
    """Init method. """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    PythonTarget.__init__(self, name, 'py_binary', srcs, deps, base, None, kwargs)
    self.data['run_in_shell'] = True
    if main:
        self.data['main'] = main
    else:
        if len(srcs) == 1:
            self.data['main'] = srcs[0]
        else:
            console.error_exit(
                '%s: The entry file must be specified by the "main" '
                'argument if there is more than one src' % self.fullname)
def __init__(self, name, type, srcs, deps, base, visibility, kwargs):
    """Init method. """
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    Target.__init__(self, name, type, srcs, deps, visibility, blade.blade, kwargs)
    if base:
        if not base.startswith('//'):
            console.error_exit('%s: Invalid base directory %s. Option base should '
                               'be a directory starting with \'//\' from BLADE_ROOT directory.' %
                               (self.fullname, base))
        self.data['python_base'] = base[2:]
    self.data['python_sources'] = [self._source_file_path(s) for s in srcs]
def _normalize_one(target, working_dir):
    """Normalize target from command line form into canonical form.

    Target canonical form: dir:name
        dir: relative to blade_root_dir, use '.' for blade_root_dir
        name: name  if target is dir:name
              '*'   if target is dir
              '...' if target is dir/...
    """
    if target.startswith('//'):
        target = target[2:]
    elif target.startswith('/'):
        console.error_exit('Invalid target "%s" starting from root path.' % target)
    else:
        if working_dir != '.':
            target = os.path.join(working_dir, target)
    if ':' in target:
        path, name = target.rsplit(':', 1)
    else:
        if target.endswith('...'):
            path = target[:-3]
            name = '...'
        else:
            path = target
            name = '*'
    path = os.path.normpath(path)
    return '%s:%s' % (path, name)
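
# Illustrative traces of _normalize_one above, derived by reading the code
# (not additional documented behavior):
#   _normalize_one('common/base:string', '.')  -> 'common/base:string'
#   _normalize_one('//common/base', 'app')     -> 'common/base:*'
#   _normalize_one('...', 'common')            -> 'common:...'
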
def _proto_go_rules(self):
    """Generate go files. """
    env_name = self._env_name()
    var_name = self._var_name('go')
    go_home = configparse.blade_config.get_config('go_config')['go_home']
    if not go_home:
        console.error_exit('%s: go_home is not configured in BLADE_ROOT.' %
                           self.fullname)
    proto_config = configparse.blade_config.get_config('proto_library_config')
    proto_go_path = proto_config['protobuf_go_path']
    self._write_rule('%s.Replace(PROTOBUFGOPATH="%s")' % (env_name, proto_go_path))
    self._write_rule('%s = []' % var_name)
    for src in self.srcs:
        proto_src = os.path.join(self.path, src)
        go_src = self._proto_gen_go_file(src)
        go_src_var = self._var_name_of(src, 'go_src')
        self._write_rule('%s = %s.ProtoGo("%s", "%s")' % (
            go_src_var, env_name, go_src, proto_src))
        # Copy the generated go sources to $GOPATH
        # according to the standard go directory layout
        proto_dir = os.path.dirname(src)
        proto_name = os.path.basename(src)
        go_dst = os.path.join(go_home, 'src', proto_go_path, self.path,
                              proto_dir, proto_name.replace('.', '_'),
                              os.path.basename(go_src))
        go_dst_var = self._var_name_of(src, 'go_dst')
        self._write_rule('%s = %s.ProtoGoSource("%s", %s)' % (
            go_dst_var, env_name, go_dst, go_src_var))
        self._write_rule('%s.append(%s)' % (var_name, go_dst_var))
    self._add_target_var('go', var_name)
def __init__(self):
    """Init the class. """
    (self.options, others) = self._cmd_parse()

    # If '--' in arguments, use all other arguments after it as run
    # arguments
    if '--' in others:
        pos = others.index('--')
        self.targets = others[:pos]
        self.options.args = others[pos + 1:]
    else:
        self.targets = others
        self.options.args = []

    for t in self.targets:
        if t.startswith('-'):
            console.error_exit('unrecognized option %s, use blade [action] '
                               '--help to get all the options' % t)

    command = self.options.command

    # Check the options with different sub command
    actions = {
        'build': self._check_build_command,
        'run': self._check_run_command,
        'test': self._check_test_command,
        'clean': self._check_clean_command,
        'query': self._check_query_command
    }
    actions[command]()
def _find_all_deps(target_id, targets, deps_map_cache, root_targets=None):
    """_find_all_deps.

    Return all targets that target_id depends on, directly and/or indirectly.
    The root_targets parameter is needed to detect circular dependencies.

    """
    new_deps_list = deps_map_cache.get(target_id)
    if new_deps_list is not None:
        return new_deps_list

    if root_targets is None:
        root_targets = set()
    root_targets.add(target_id)
    new_deps_list = []

    for d in targets[target_id].expanded_deps:
        # loop dependency
        if d in root_targets:
            err_msg = ''
            for t in root_targets:
                err_msg += '//%s:%s --> ' % (t[0], t[1])
            console.error_exit('loop dependency found: //%s:%s --> [%s]' % (
                d[0], d[1], err_msg))
        _check_dep_visibility(target_id, d, targets)
        new_deps_list.append(d)
        new_deps_list += _find_all_deps(d, targets, deps_map_cache, root_targets)

    new_deps_list = _unique_deps(new_deps_list)
    deps_map_cache[target_id] = new_deps_list
    root_targets.remove(target_id)
    return new_deps_list
def _check_deps_in_build_file(self, deps):
    """_check_deps_in_build_file.

    Parameters
    -----------
    deps: the deps list in BUILD file

    Returns
    -----------
    None

    Description
    -----------
    Check whether the user's BUILD file is consistent with blade's rules.

    """
    name = self.data['name']
    for dep in deps:
        if not (dep.startswith(':') or dep.startswith('#') or
                dep.startswith('//') or dep.startswith('./')):
            console.error_exit('%s/%s: Invalid dep in %s.' % (
                self.data['path'], name, dep))
        if dep.count(':') > 1:
            console.error_exit('%s/%s: Invalid dep %s, missing \',\' between 2 deps?' %
                               (self.data['path'], name, dep))
def __init__(self, name, srcs, deps, type, out, shell, blade, kwargs):
    srcs = var_to_list(srcs)
    deps = var_to_list(deps)
    Target.__init__(self, name, 'package', [], deps, None, blade, kwargs)
    if type not in _package_types:
        console.error_exit('%s: Invalid type %s. Types supported '
                           'by the package are %s' % (
                           self.fullname, type, ', '.join(sorted(_package_types))))
    self.data['type'] = type
    self.data['sources'], self.data['locations'] = [], []
    self._process_srcs(srcs)
    if not out:
        out = '%s.%s' % (name, type)
    self.data['out'] = out
    self.data['shell'] = shell
def md5sum_str(user_str):
    """md5sum of basestring. """
    if not isinstance(user_str, basestring):
        console.error_exit('Not a valid basestring type to calculate md5.')
    m = md5.md5()
    m.update(user_str)
    return m.hexdigest()
def _generate_target_explict_dependency(self, target_files):
    """_generate_target_explict_dependency.

    Description
    -----------
    Generate the dependency relationship between two targets that have no
    declared dependency but really need one when the user specifies it in
    the BUILD file.

    1. gen_rule targets should be built before the other targets that need
       their outputs

    """
    if not target_files:
        return
    env_name = self._env_name()
    files = var_to_list(target_files)
    files_str = ",".join(["%s" % f for f in files])
    if not self.blade.get_expanded():
        console.error_exit("logic error in Blade, error in _generate_target_explict_dependency")
    targets = self.blade.get_all_targets_expanded()
    files_map = self.blade.get_gen_rule_files_map()
    deps = targets[self.key]["deps"]
    for d in deps:
        dep_target = targets[d]
        if dep_target["type"] == "gen_rule":
            srcs_list = files_map[(dep_target["path"], dep_target["name"])]
            if srcs_list:
                self._write_rule("%s.Depends([%s], [%s])" % (
                    env_name, files_str, srcs_list))
def _check_srcs(self):
    """Check source files.

    Description
    -----------
    It will warn if one file belongs to two different targets.

    """
    config = configparse.blade_config.get_config('global_config')
    action = config.get('duplicated_source_action')
    allow_dup_src_type_list = ['cc_binary', 'cc_test']
    for s in self.srcs:
        if '..' in s or s.startswith('/'):
            console.error_exit(
                '%s:%s Invalid source file path: %s. '
                'can only be a relative path, and must be in the current '
                'directory or subdirectories' % (self.path, self.name, s))
        src_key = os.path.normpath('%s/%s' % (self.path, s))
        src_value = '%s %s:%s' % (self.type, self.path, self.name)
        if src_key in Target.__src_target_map:
            value_existed = Target.__src_target_map[src_key]
            # May be inserted multiple times in tests because modules are not unloaded
            if (value_existed != src_value and
                    not (value_existed.split(': ')[0] in allow_dup_src_type_list and
                         self.type in allow_dup_src_type_list)):
                message = 'Source file %s belongs to both %s and %s' % (
                    s, value_existed, src_value)
                if action == 'error':
                    console.error_exit(message)
                elif action == 'warning':
                    console.warning(message)
                elif action == 'none' or not action:
                    pass
        Target.__src_target_map[src_key] = src_value
def _generate_target_explict_dependency(self, target_files):
    """_generate_target_explict_dependency.

    Description
    -----------
    Generate the dependency relationship between two targets that have no
    declared dependency but really need one when the user specifies it in
    the BUILD file.

    1. gen_rule targets should be built before the other targets that need
       their outputs

    """
    if not target_files:
        return
    env_name = self._env_name()
    files = var_to_list(target_files)
    files_str = ",".join(["%s" % f for f in files])
    if not self.blade.get_expanded():
        console.error_exit(
            "logic error in Blade, error in _generate_target_explict_dependency")
    targets = self.blade.get_all_targets_expanded()
    files_map = self.blade.get_gen_rule_files_map()
    deps = targets[self.key]['deps']
    for d in deps:
        dep_target = targets[d]
        if dep_target['type'] == 'gen_rule':
            srcs_list = files_map[(dep_target['path'], dep_target['name'])]
            if srcs_list:
                self._write_rule("%s.Depends([%s], [%s])" % (
                    env_name, files_str, srcs_list))
def _check_deps_in_build_file(self, name, deps):
    """_check_deps_in_build_file.

    Parameters
    -----------
    name: the target's name
    deps: the deps list in BUILD file

    Returns
    -----------
    None

    Description
    -----------
    Check whether the user's BUILD file is consistent with blade's rules.

    """
    for dep in deps:
        if not (dep.startswith(':') or dep.startswith('#') or
                dep.startswith('//') or dep.startswith('./')):
            console.error_exit('%s/%s: Invalid dep in %s.' %
                               (self.current_source_path, name, dep))
        if dep.count(':') > 1:
            console.error_exit(
                '%s/%s: Invalid dep %s, missing \',\' between 2 deps?' %
                (self.current_source_path, name, dep))
def cc_test_config(append=None, **kwargs):
    """cc_test_config section. """
    heap_check = kwargs.get('heap_check')
    if heap_check is not None and heap_check not in HEAP_CHECK_VALUES:
        console.error_exit('cc_test_config: heap_check can only be in %s' %
                           HEAP_CHECK_VALUES)
    blade_config.update_config('cc_test_config', append, kwargs)
def _find_all_deps(target_id, targets, deps_map_cache, root_targets=None):
    """_find_all_deps.

    Return all targets that target_id depends on, directly and/or indirectly.
    The root_targets parameter is needed to detect circular dependencies.

    """
    new_deps_list = deps_map_cache.get(target_id)
    if new_deps_list is not None:
        return new_deps_list

    if root_targets is None:
        root_targets = set()
    root_targets.add(target_id)
    new_deps_list = []

    for d in targets[target_id].expanded_deps:
        # loop dependency
        if d in root_targets:
            err_msg = ''
            for t in root_targets:
                err_msg += '//%s:%s --> ' % (t[0], t[1])
            console.error_exit('loop dependency found: //%s:%s --> [%s]' % (
                d[0], d[1], err_msg))
        _check_dep_visibility(target_id, d, targets)
        new_deps_list.append(d)
        new_deps_list += _find_all_deps(d, targets, deps_map_cache, root_targets)

    new_deps_list = _unique_deps(new_deps_list)
    deps_map_cache[target_id] = new_deps_list
    root_targets.remove(target_id)
    return new_deps_list
def _normalize_target(target, working_dir):
    '''Normalize target from command line into canonical form.

    Target canonical form: dir:name
        dir: relative to blade_root_dir, use '.' for blade_root_dir
        name: name  if target is dir:name
              '*'   if target is dir
              '...' if target is dir/...
    '''
    if target.startswith('//'):
        target = target[2:]
    elif target.startswith('/'):
        console.error_exit('Invalid target "%s" starting from root path.' % target)
    else:
        if working_dir != '.':
            target = os.path.join(working_dir, target)
    if ':' in target:
        path, name = target.rsplit(':', 1)
    else:
        if target.endswith('...'):
            path = target[:-3]
            name = '...'
        else:
            path = target
            name = '*'
    path = os.path.normpath(path)
    return '%s:%s' % (path, name)
def scons_rules(self):
    """scons_rules.

    This method should be implemented in subclass.

    """
    console.error_exit('%s: should be subclassing' % self.type)
def protoc_plugin(**kwargs):
    """protoc_plugin. """
    if 'name' not in kwargs:
        console.error_exit("Missing 'name' in protoc_plugin parameters: %s" % kwargs)
    config = blade_config.get_config('protoc_plugin_config')
    config[kwargs['name']] = ProtocPlugin(**kwargs)
def md5sum_str(user_str):
    """md5sum of basestring. """
    m = hashlib.md5()
    if not isinstance(user_str, basestring):
        console.error_exit("not a valid basestring type to calculate md5")
    m.update(user_str)
    return m.hexdigest()
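
# A minimal sketch (not part of Blade) of how the same helper could look under
# Python 3, where basestring is gone and hashlib.md5 requires bytes. It assumes
# the same module-level hashlib import and console helper as the code above.
def md5sum_str_py3(user_str):
    """Hypothetical Python 3 variant of md5sum_str, shown only for comparison."""
    if not isinstance(user_str, str):
        console.error_exit("not a valid str type to calculate md5")
    # Encode explicitly because hashlib only accepts bytes in Python 3
    return hashlib.md5(user_str.encode('utf-8')).hexdigest()
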
def cc_test_config(append=None, **kwargs):
    """cc_test_config section. """
    heap_check = kwargs.get("heap_check")
    if heap_check and heap_check not in HEAP_CHECK_VALUES:
        console.error_exit("cc_test_config: heap_check can only be in %s" %
                           HEAP_CHECK_VALUES)
    global blade_config
    blade_config.update_config("cc_test_config", append, kwargs)
def _check_config(self):
    """Check whether maven is configured correctly. """
    if not self.__need_check_config:
        return
    if not self.__maven:
        console.error_exit('MavenCache was not configured')
    self.__need_check_config = False
def _check_run_targets(self):
    """Check that the run command has exactly one target. """
    err = False
    targets = []
    if len(self.targets) == 0:
        err = True
    elif self.targets[0].find(':') == -1:
        err = True
    if err:
        console.error_exit('Please specify a single target to run: '
                           'blade run //target_path:target_name (or '
                           'a_path:target_name)')
    if self.options.command == 'run' and len(self.targets) > 1:
        console.warning('run command will only take one target to build and run')
    if self.targets[0].startswith('//'):
        targets.append(self.targets[0][2:])
    else:
        targets.append(self.targets[0])
    self.targets = targets
    if self.options.runargs:
        console.warning('--runargs has been deprecated, please put all run'
                        ' arguments after a "--"')
        self.options.args = shlex.split(self.options.runargs) + self.options.args
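
# Illustrative invocations accepted by the check above (the flag name is only
# an example, not a documented Blade option):
#   blade run //common/base:echo_test -- --gtest_filter=Echo.*
#   blade run common/base:echo_test
# A plain `blade run common/base` is rejected because the target lacks a
# ':name' part.
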
def _check_srcs(self):
    """Check source files.

    Description
    -----------
    It will warn if one file belongs to two different targets.

    """
    config = configparse.blade_config.get_config('global_config')
    action = config.get('duplicated_source_action')
    allow_dup_src_type_list = ['cc_binary', 'cc_test']
    for s in self.srcs:
        if '..' in s or s.startswith('/'):
            console.error_exit('%s:%s Invalid source file path: %s. '
                               'can only be a relative path, and must be in the '
                               'current directory or subdirectories' %
                               (self.path, self.name, s))
        src_key = os.path.normpath('%s/%s' % (self.path, s))
        src_value = '%s %s:%s' % (self.type, self.path, self.name)
        if src_key in Target.__src_target_map:
            value_existed = Target.__src_target_map[src_key]
            # May be inserted multiple times in tests because modules are not unloaded
            if (value_existed != src_value and
                    not (value_existed.split(': ')[0] in allow_dup_src_type_list and
                         self.type in allow_dup_src_type_list)):
                message = 'Source file %s belongs to both %s and %s' % (
                    s, value_existed, src_value)
                if action == 'error':
                    console.error_exit(message)
                elif action == 'warning':
                    console.warning(message)
                elif action == 'none' or not action:
                    pass
        Target.__src_target_map[src_key] = src_value
def _proto_go_rules(self):
    """Generate go files. """
    env_name = self._env_name()
    var_name = self._var_name('go')
    go_home = configparse.blade_config.get_config('go_config')['go_home']
    if not go_home:
        console.error_exit('%s: go_home is not configured in BLADE_ROOT.' %
                           self.fullname)
    proto_config = configparse.blade_config.get_config('proto_library_config')
    proto_go_path = proto_config['protobuf_go_path']
    self._write_rule('%s.Replace(PROTOBUFGOPATH="%s")' % (env_name, proto_go_path))
    self._write_rule('%s = []' % var_name)
    for src in self.srcs:
        proto_src = os.path.join(self.path, src)
        go_src = self._proto_gen_go_file(src)
        go_src_var = self._var_name_of(src, 'go_src')
        self._write_rule('%s = %s.ProtoGo("%s", "%s")' % (
            go_src_var, env_name, go_src, proto_src))
        # Copy the generated go sources to $GOPATH
        # according to the standard go directory layout
        proto_dir = os.path.dirname(src)
        proto_name = os.path.basename(src)
        go_dst = os.path.join(go_home, 'src', proto_go_path, self.path,
                              proto_dir, proto_name.replace('.', '_'),
                              os.path.basename(go_src))
        go_dst_var = self._var_name_of(src, 'go_dst')
        self._write_rule('%s = %s.ProtoGoSource("%s", %s)' % (
            go_dst_var, env_name, go_dst, go_src_var))
        self._write_rule('%s.append(%s)' % (var_name, go_dst_var))
    self._add_target_var('go', var_name)
def global_config(append=None, **kwargs):
    """global_config section. """
    duplicated_source_action = kwargs.get('duplicated_source_action')
    if duplicated_source_action not in __DUPLICATED_SOURCE_ACTION_VALUES:
        console.error_exit('Invalid global_config.duplicated_source_action '
                           'value, can only be in %s' %
                           __DUPLICATED_SOURCE_ACTION_VALUES)
    blade_config.update_config('global_config', append, kwargs)
def __init__(self):
    """Init the class. """
    (self.options, others) = self._cmd_parse()

    # If '--' in arguments, use all other arguments after it as run arguments
    if '--' in others:
        pos = others.index('--')
        self.targets = others[:pos]
        self.options.args = others[pos + 1:]
    else:
        self.targets = others
        self.options.args = []

    for t in self.targets:
        if t.startswith('-'):
            console.error_exit("unrecognized option %s, use blade [action] "
                               "--help to get all the options" % t)

    command = self.options.command

    # Check the options with different sub command
    actions = {
        'build': self._check_build_command,
        'run': self._check_run_command,
        'test': self._check_test_command,
        'clean': self._check_clean_command,
        'query': self._check_query_command
    }
    actions[command]()
def generate_proto_rules(self):
    proto_config = config.get_section('proto_library_config')
    protoc = proto_config['protoc']
    protoc_java = protoc
    if proto_config['protoc_java']:
        protoc_java = proto_config['protoc_java']
    protobuf_incs = protoc_import_path_option(proto_config['protobuf_incs'])
    protobuf_java_incs = protobuf_incs
    if proto_config['protobuf_java_incs']:
        protobuf_java_incs = protoc_import_path_option(
            proto_config['protobuf_java_incs'])
    self._add_rule('''
protocflags =
protoccpppluginflags =
protocjavapluginflags =
protocpythonpluginflags =
''')
    self.generate_rule(name='proto',
                       command='%s --proto_path=. %s -I=`dirname ${in}` '
                               '--cpp_out=%s ${protocflags} ${protoccpppluginflags} ${in}' % (
                                   protoc, protobuf_incs, self.build_dir),
                       description='PROTOC ${in}')
    self.generate_rule(name='protojava',
                       command='%s --proto_path=. %s --java_out=%s/`dirname ${in}` '
                               '${protocjavapluginflags} ${in}' % (
                                   protoc_java, protobuf_java_incs, self.build_dir),
                       description='PROTOCJAVA ${in}')
    self.generate_rule(name='protopython',
                       command='%s --proto_path=. %s -I=`dirname ${in}` '
                               '--python_out=%s ${protocpythonpluginflags} ${in}' % (
                                   protoc, protobuf_incs, self.build_dir),
                       description='PROTOCPYTHON ${in}')
    self.generate_rule(name='protodescriptors',
                       command='%s --proto_path=. %s -I=`dirname ${first}` '
                               '--descriptor_set_out=${out} --include_imports '
                               '--include_source_info ${in}' % (
                                   protoc, protobuf_incs),
                       description='PROTODESCRIPTORS ${in}')
    protoc_go_plugin = proto_config['protoc_go_plugin']
    if protoc_go_plugin:
        go_home = config.get_item('go_config', 'go_home')
        if not go_home:
            console.error_exit('go_home is not configured in either '
                               'BLADE_ROOT or BLADE_ROOT.local.')
        outdir = os.path.join(go_home, 'src')
        subplugins = proto_config['protoc_go_subplugins']
        if subplugins:
            go_out = 'plugins=%s:%s' % ('+'.join(subplugins), outdir)
        else:
            go_out = outdir
        self.generate_rule(name='protogo',
                           command='%s --proto_path=. %s -I=`dirname ${in}` '
                                   '--plugin=protoc-gen-go=%s --go_out=%s ${in}' % (
                                       protoc, protobuf_incs, protoc_go_plugin, go_out),
                           description='PROTOCGOLANG ${in}')
def download_pkg(source_dir):
    retry = 3
    base_url, download_dir = find_pkg_dir(source_dir)
    meta = download(base_url + '/META', '/tmp/META')
    meta_map = parse_meta(meta)
    name = meta_map.get('name', '')
    md5 = meta_map.get('md5', '').lower()
    if not name:
        return False
    for i in range(retry):
        try:
            res = download(base_url + '/' + name, '/tmp/' + name)
        except HTTPError:
            console.error_exit("Can't find package `%s'" % source_dir)
        except URLError:
            console.error_exit("Can't connect to remote host")
        calc_md5 = hashlib.md5(res).hexdigest()
        if calc_md5 == md5:
            break
        time.sleep(2 ** i)
    else:
        return False
    ret = subprocess.call('tar -xf /tmp/%s -C %s' % (name, download_dir), shell=True)
    return ret == 0
def _try_parse_file(self, filename):
    """load the configuration file and parse. """
    try:
        self.current_file_name = filename
        if os.path.exists(filename):
            execfile(filename)
    except:
        console.error_exit("Parse error in config file %s, exit...\n%s" %
                           (filename, traceback.format_exc()))
def _report_not_exist(source_dir, path, blade):
    """Report dir or BUILD file does not exist. """
    depender = _find_dir_depender(source_dir, blade)
    if depender:
        console.error_exit('//%s not found, required by %s, exit...' %
                           (path, depender))
    else:
        console.error_exit('//%s not found, exit...' % path)
def protoc_plugin(**kwargs):
    """protoc_plugin. """
    from proto_library_target import ProtocPlugin
    if 'name' not in kwargs:
        console.error_exit("Missing 'name' in protoc_plugin parameters: %s" % kwargs)
    section = _blade_config.get_section('protoc_plugin_config')
    section[kwargs['name']] = ProtocPlugin(**kwargs)
def ninja_cc_source(self, source):
    if source.endswith('.l') or source.endswith('.y'):
        return source + '.c'
    elif source.endswith('.ll') or source.endswith('.yy'):
        return source + '.cc'
    else:
        console.error_exit('%s: Unknown source %s' % (self.fullname, source))
def _try_parse_file(self, filename):
    """load the configuration file and parse. """
    try:
        self.current_file_name = filename
        if os.path.exists(filename):
            execfile(filename)
    except SystemExit:
        console.error_exit('Parse error in config file %s, exit...' % filename)
def _get_artifact_from_database(self, id, classifier):
    """get_artifact_from_database. """
    self._check_config()
    self._check_id(id)
    if (id, classifier) not in self.__jar_database:
        if not self._download_artifact(id, classifier):
            console.error_exit('Download %s failed' % id)
    return self.__jar_database[(id, classifier)]
def _check_id(self, id):
    """Check if id is valid. """
    parts = id.split(":")
    if len(parts) == 3:
        group, artifact, version = parts
        if group and artifact and version:
            return
    console.error_exit("Invalid id %s: Id should be group:artifact:version, "
                       "such as jaxen:jaxen:1.1.6" % id)
def _compiler_target_arch(self):
    """Compiler(gcc) target architecture. """
    arch = BuildPlatform._get_cc_target_arch()
    pos = arch.find('-')
    if pos == -1:
        console.error_exit('Unknown target architecture %s from gcc.' % arch)
    return arch[:pos]
def _try_parse_file(self, filename):
    """load the configuration file and parse. """
    try:
        self.current_file_name = filename
        if os.path.exists(filename):
            execfile(filename)
    except:
        console.error_exit('Parse error in config file %s, exit...\n%s' %
                           (filename, traceback.format_exc()))
def lock_workspace():
    lock_file_fd, ret_code = lock_file('.Building.lock')
    if lock_file_fd == -1:
        if ret_code == errno.EAGAIN:
            console.error_exit(
                'There is already an active building in current source tree.')
        else:
            console.error_exit('Lock exception, please try it later.')
    return lock_file_fd
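
# lock_file() is defined elsewhere in Blade. Below is only a minimal sketch of
# a compatible helper, assuming POSIX advisory locking via fcntl and the
# (fd, error_code) contract used above; it is not the actual implementation.
def _lock_file_sketch(path):
    """Hypothetical helper: returns (fd, 0) on success, (-1, errno) on failure."""
    import fcntl
    import os
    try:
        fd = os.open(path, os.O_CREAT | os.O_RDWR, 0o644)
        # Non-blocking exclusive lock; raises with EAGAIN/EACCES if already held
        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return fd, 0
    except (IOError, OSError) as e:
        return -1, e.errno
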
def _check_proto_srcs_name(self, srcs_list):
    """_check_proto_srcs_name.

    Check whether the proto file names end with '.proto'.

    """
    for src in srcs_list:
        if not src.endswith('.proto'):
            console.error_exit('Invalid proto file name %s' % src)
def _prepare_test_data(self, target):
    if 'testdata' not in target['options']:
        return
    link_name_list = []
    for i in target['options']['testdata']:
        if isinstance(i, tuple):
            data_target = i[0]
            link_name = i[1]
        else:
            data_target = link_name = i
        if '..' in data_target:
            continue
        if link_name.startswith('//'):
            link_name = link_name[2:]
        err_msg, item = self.__check_link_name(link_name, link_name_list)
        if err_msg == "AMBIGUOUS":
            console.error_exit("Ambiguous testdata of //%s:%s: %s, exit..." % (
                target['path'], target['name'], link_name))
        elif err_msg == "INCOMPATIBLE":
            console.error_exit("%s could not exist with %s in testdata of //%s:%s" % (
                link_name, item, target['path'], target['name']))
        link_name_list.append(link_name)
        try:
            os.makedirs(os.path.dirname('%s/%s' % (
                self._runfiles_dir(target), link_name)))
        except OSError:
            pass

        symlink_name = os.path.abspath('%s/%s' % (
            self._runfiles_dir(target), link_name))
        symlink_valid = False
        if os.path.lexists(symlink_name):
            if os.path.exists(symlink_name):
                symlink_valid = True
                console.warning("%s already existed, could not prepare "
                                "testdata for //%s:%s" % (
                                link_name, target['path'], target['name']))
            else:
                os.remove(symlink_name)
                console.warning("%s already existed, but it is a broken "
                                "symbolic link, blade will remove it and "
                                "make a new one." % link_name)
        if data_target.startswith('//'):
            data_target = data_target[2:]
            dest_data_file = os.path.abspath(data_target)
        else:
            dest_data_file = os.path.abspath("%s/%s" % (
                target['path'], data_target))
        if not symlink_valid:
            os.symlink(dest_data_file,
                       '%s/%s' % (self._runfiles_dir(target), link_name))
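
# Illustrative testdata forms handled by the loop above (the exact BUILD syntax
# is an assumption based on this code): a plain string links the file under the
# same relative name, while a (source, link_name) tuple renames it in runfiles:
#   testdata = [
#       'data/schema.conf',
#       ('//common/data/big.dat', 'big.dat'),
#   ]
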
def _check_color_options(self):
    """check color options. """
    if self.options.color == 'yes':
        console.color_enabled = True
    elif self.options.color == 'no':
        console.color_enabled = False
    elif self.options.color == 'auto' or self.options.color is None:
        pass
    else:
        console.error_exit('--color can only be yes, no or auto.')
def _check_color_options(self):
    """check color options. """
    if self.options.color == "yes":
        console.color_enabled = True
    elif self.options.color == "no":
        console.color_enabled = False
    elif self.options.color == "auto" or self.options.color is None:
        pass
    else:
        console.error_exit("--color can only be yes, no or auto.")
def query_dependency_tree(self, output_file):
    """Query the dependency tree of the specified targets. """
    if self.__options.dependents:
        console.error_exit('only query --deps can be output as tree format')
    print(file=output_file)
    for key in self.__all_command_targets:
        self._query_dependency_tree(key, 0, self.__build_targets, output_file)
        print(file=output_file)
def register_target(self, target):
    """Register a target into the blade target database.

    It is used for quick lookup.

    """
    key = target.key
    # Check whether there is already a key in database
    if key in self.__target_database:
        console.error_exit('Target %s is duplicate in //%s/BUILD' % (
            target.name, target.path))
    self.__target_database[key] = target
def scons_rules(self):
    """scons_rules.

    Parameters
    -----------
    None

    Returns
    -----------
    None

    Description
    -----------
    It outputs the scons rules according to user options.

    """
    self._clone_env()
    if self.data['type'] == 'prebuilt_py_binary':
        return
    env_name = self._env_name()

    setup_file = os.path.join(self.data['path'], "setup.py")
    python_package = os.path.join(self.data['path'], self.data['name'])
    init_file = os.path.join(python_package, '__init__.py')

    binary_files = []
    if os.path.exists(setup_file):
        binary_files.append(setup_file)

    if not os.path.exists(init_file):
        console.error_exit("The __init__.py does not exist in %s" % python_package)
    binary_files.append(init_file)

    dep_var_list = []
    self.targets = self.blade.get_all_targets_expanded()
    for dep in self.targets[self.key]['deps']:
        if dep in self.blade.python_binary_dep_source_map.keys():
            for f in self.blade.python_binary_dep_source_map[dep]:
                binary_files.append(f)
            for cmd in self.blade.python_binary_dep_source_cmd[dep]:
                dep_var_list.append(cmd)

    target_egg_file = "%s.egg" % self._target_file_path()
    python_binary_var = "%s_python_binary_var" % (
        self._generate_variable_name(self.data['path'], self.data['name']))
    self._write_rule("%s = %s.PythonBinary(['%s'], %s)" % (
        python_binary_var, env_name, target_egg_file, binary_files))
    for var in dep_var_list:
        self._write_rule("%s.Depends(%s, %s)" % (
            env_name, python_binary_var, var))
def main(blade_path):
    exit_code = 0
    try:
        exit_code = _main(blade_path)
    except SystemExit as e:
        exit_code = e.code
    except KeyboardInterrupt:
        console.error_exit("keyboard interrupted", -signal.SIGINT)
    except:
        console.error_exit(traceback.format_exc())
    sys.exit(exit_code)
def register_scons_target(self, target_key, scons_target):
    """Register scons targets into the scons targets map.

    It is used for quick lookup.

    """
    # Check whether there is already a key in the database
    if target_key in self.scons_targets_map.keys():
        console.error_exit("target name %s is duplicate in //%s/BUILD" % (
            target_key[1], target_key[0]))
    self.scons_targets_map[target_key] = scons_target