def _check_srcs(self):
    """Check source files.

    Warns if one file belongs to two different targets, unless both
    targets' types are in the duplicate-allowed list.
    """
    allow_dup_src_type_list = ['cc_binary', 'cc_test']
    for s in self.srcs:
        if '..' in s or s.startswith('/'):
            # New-style raise: valid in python 2 and 3 (the old
            # `raise Exception, (...)` form is python-2-only).
            raise Exception(
                'Invalid source file path: %s. '
                'can only be relative path, and must in current directory or '
                'subdirectories' % s)
        src_key = os.path.normpath('%s/%s' % (self.path, s))
        src_value = '%s %s:%s' % (self.type, self.path, self.name)
        if src_key in Target.__src_target_map:
            value_existed = Target.__src_target_map[src_key]
            # May insert multiple time in test because of not unloading module
            # src_value is '<type> <path>:<name>'; its type is the token
            # before the first space.  The original split(': ') could never
            # match that format, silently disabling the allow list.
            if (value_existed != src_value and
                    not (value_existed.split(' ')[0] in allow_dup_src_type_list and
                         self.type in allow_dup_src_type_list)):
                # Just warn here, not raising exception
                console.warning('Source file %s belongs to both %s and %s' % (
                    s, value_existed, src_value))
        Target.__src_target_map[src_key] = src_value
def _get_opened_files(targets, blade_root_dir, working_dir):
    """Return the set of locally modified/added C/C++ sources and headers
    (absolute paths) in the directories of the given targets.

    Supports svn and git working copies; warns for anything else.
    """
    checked_dirs = set()
    opened_files = set()
    blade_root_dir = os.path.normpath(blade_root_dir)
    for target in targets:
        target = _normalize_target_path(target)
        d = os.path.dirname(target)
        # Bug fix: the original `return`ed here, aborting the whole scan
        # (and returning None) as soon as a directory repeated; we only
        # want to skip the already-checked directory.
        if d in checked_dirs:
            continue
        checked_dirs.add(d)
        output = []
        if is_svn_client(blade_root_dir):
            full_target = os.path.normpath(os.path.join(working_dir, d))
            top_dir = full_target[len(blade_root_dir) + 1:]
            output = os.popen('svn st %s' % top_dir).read().split('\n')
        else:
            (is_git, git_root, git_subdir) = is_git_client(
                blade_root_dir, target, working_dir)
            if is_git:
                os.chdir(git_root)
                status_cmd = 'git status --porcelain %s' % (git_subdir)
                output = os.popen(status_cmd).read().split('\n')
            else:
                console.warning('unknown source client type, NOT svn OR git')
        for line in output:
            seg = line.strip().split(' ')
            # Only modified (M) or added (A) entries are of interest.
            if seg[0] != 'M' and seg[0] != 'A':
                continue
            f = seg[-1]
            if f.endswith(('.h', '.hpp', '.cc', '.cpp')):
                fullpath = os.path.join(os.getcwd(), f)
                opened_files.add(fullpath)
    return opened_files
def _get_version_info(self):
    """Gets svn root dir info.

    Fills self.svn_info_map[root_dir] with `svn info` output for each
    svn root; warns for roots not under version control.
    """
    for root_dir in self.svn_roots:
        # Bug fix: the original aliased os.environ and mutated it, leaking
        # LC_ALL=POSIX into the whole blade process; work on a copy.
        lc_all_env = os.environ.copy()
        lc_all_env["LC_ALL"] = "POSIX"
        root_dir_realpath = os.path.realpath(root_dir)
        svn_working_dir = os.path.dirname(root_dir_realpath)
        svn_dir = os.path.basename(root_dir_realpath)
        if not os.path.exists("%s/.svn" % root_dir):
            console.warning('"%s" is not under version control' % root_dir)
            continue
        p = subprocess.Popen(
            "svn info %s" % svn_dir,
            env=lc_all_env,
            cwd=svn_working_dir,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
        )
        std_out, std_err = p.communicate()
        if p.returncode:
            console.warning("failed to get version control info in %s" % root_dir)
        else:
            # Escape newlines so the info can be embedded in generated files.
            self.svn_info_map[root_dir] = std_out.replace("\n", "\\n\\\n")
def _fast_link_helper(target, source, env, link_com):
    """fast link helper function.

    Links into a temporary file under linking_tmp_dir first, then moves
    the result over the real target on success.
    """
    target_file = str(target[0])
    prefix_str = 'blade_%s' % target_file.replace('/', '_').replace('.', '_')
    fd, temporary_file = tempfile.mkstemp(suffix='xianxian',
                                          prefix=prefix_str,
                                          dir=linking_tmp_dir)
    os.close(fd)
    source_paths = [str(s) for s in source]
    link_com_str = link_com.substitute(FL_TARGET=temporary_file,
                                       FL_SOURCE=' '.join(source_paths))
    p = subprocess.Popen(link_com_str,
                         env=os.environ,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if std_out:
        print(std_out)
    if std_err:
        print(std_err)
    if p.returncode == 0:
        shutil.move(temporary_file, target_file)
        if not os.path.exists(target_file):
            console.warning('failed to genreate %s in link on tmpfs mode' % target_file)
    else:
        _blade_action_postfunc('failed while fast linking')
    return p.returncode
def _get_version_info(self):
    """Gets svn root dir info.

    Fills self.svn_info_map[root_dir] with `svn info` output for each
    svn root; warns for roots not under version control.
    """
    for root_dir in self.svn_roots:
        # Bug fix: the original aliased os.environ and mutated it, leaking
        # LC_ALL=POSIX into the whole blade process; work on a copy.
        lc_all_env = os.environ.copy()
        lc_all_env['LC_ALL'] = 'POSIX'
        root_dir_realpath = os.path.realpath(root_dir)
        svn_working_dir = os.path.dirname(root_dir_realpath)
        svn_dir = os.path.basename(root_dir_realpath)
        if not os.path.exists('%s/.svn' % root_dir):
            console.warning('"%s" is not under version control' % root_dir)
            continue
        p = subprocess.Popen('svn info %s' % svn_dir,
                             env=lc_all_env,
                             cwd=svn_working_dir,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True)
        std_out, std_err = p.communicate()
        if p.returncode:
            console.warning('failed to get version control info in %s' % root_dir)
        else:
            # Escape newlines so the info can be embedded in generated files.
            self.svn_info_map[root_dir] = std_out.replace('\n', '\\n\\\n')
def _check_srcs(self):
    """Check source files.

    Per the configured duplicated_source_action, errors or warns when one
    file belongs to two different targets, unless both targets' types are
    in the duplicate-allowed list.
    """
    config = configparse.blade_config.get_config('global_config')
    action = config.get('duplicated_source_action')
    allow_dup_src_type_list = ['cc_binary', 'cc_test']
    for s in self.srcs:
        if '..' in s or s.startswith('/'):
            console.error_exit(
                '%s:%s Invalid source file path: %s. '
                'can only be relative path, and must in current directory '
                'or subdirectories' % (self.path, self.name, s))
        src_key = os.path.normpath('%s/%s' % (self.path, s))
        src_value = '%s %s:%s' % (self.type, self.path, self.name)
        if src_key in Target.__src_target_map:
            value_existed = Target.__src_target_map[src_key]
            # May insert multiple time in test because of not unloading module
            # src_value is '<type> <path>:<name>'; its type is the token
            # before the first space.  The original split(': ') could never
            # match that format, silently disabling the allow list.
            if (value_existed != src_value and
                    not (value_existed.split(' ')[0] in allow_dup_src_type_list and
                         self.type in allow_dup_src_type_list)):
                message = 'Source file %s belongs to both %s and %s' % (
                    s, value_existed, src_value)
                if action == 'error':
                    console.error_exit(message)
                elif action == 'warning':
                    console.warning(message)
                elif action == 'none' or not action:
                    pass
        Target.__src_target_map[src_key] = src_value
def getJsonFile(path, category):
    """Load and return the JSON content of *path*.

    If the file does not exist it is created containing an empty list
    under *category*, and that template dict is returned.  If the file
    exists but holds invalid JSON, None is returned.
    """
    if os.path.isfile(path):
        with open(path, "r") as f:
            content = f.read()
        try:
            jsonStuff = json.loads(content)
        except Exception as e:
            # Typo fix in the user-facing message: 'avaiable' -> 'available'.
            console.error("No " + category + " available. ")
            jsonStuff = None
        return jsonStuff
    else:
        console.warning("Couldn't find " + str(path))
        template = {category: []}
        with open(path, "w") as f:
            f.write(json.dumps(template))
        console.info("File " + path + " created")
        return template
def _check_run_targets(self):
    """check that run command should have only one target.

    Normalizes the single target (stripping a leading '//') and folds the
    deprecated --runargs option into self.options.args.
    """
    if not self.targets or self.targets[0].find(':') == -1:
        console.error_exit('Please specify a single target to run: '
                           'blade run //target_path:target_name (or '
                           'a_path:target_name)')
    if self.options.command == 'run' and len(self.targets) > 1:
        console.warning(
            'run command will only take one target to build and run')
    first = self.targets[0]
    if first.startswith('//'):
        first = first[2:]
    self.targets = [first]
    if self.options.runargs:
        console.warning('--runargs has been deprecated, please put all run'
                        ' arguments after a "--"')
        self.options.args = shlex.split(self.options.runargs) + self.options.args
def _generate_java_coverage_report(self):
    """Generate the java coverage report via the configured reporter.

    Requires jacoco_home and coverage_reporter from java_test_config;
    skips with a warning when either is missing.
    """
    config = configparse.blade_config.get_config('java_test_config')
    jacoco_home = config['jacoco_home']
    coverage_reporter = config['coverage_reporter']
    if not (jacoco_home and coverage_reporter):
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if not coverage_data:
        return
    parts = ['java -classpath %s:%s com.tencent.gdt.blade.ReportGenerator' % (
        coverage_reporter, jacoco_libs), report_dir]
    parts.extend(','.join(data) for data in coverage_data)
    cmd = ' '.join(parts)
    console.info('Generating java coverage report')
    console.info(cmd)
    if subprocess.call(cmd, shell=True):
        console.warning('Failed to generate java coverage report')
def cc_library(name, srcs=[], deps=[], visibility=None, warning='yes',
               defs=[], incs=[], export_incs=[], optimize=[],
               always_optimize=False, pre_build=False, prebuilt=False,
               link_all_symbols=False, deprecated=False, extra_cppflags=[],
               extra_linkflags=[], allow_undefined=False, secure=False,
               **kwargs):
    """Define a cc_library target and register it with blade.

    'pre_build' is a deprecated alias of 'prebuilt'.
    """
    target = CcLibrary(name, srcs, deps, visibility, warning, defs, incs,
                       export_incs, optimize, always_optimize,
                       prebuilt or pre_build, link_all_symbols, deprecated,
                       extra_cppflags, extra_linkflags, allow_undefined,
                       secure, blade.blade, kwargs)
    if pre_build:
        console.warning("//%s:%s: 'pre_build' has been deprecated, "
                        "please use 'prebuilt'" % (target.path, target.name))
    blade.blade.register_target(target)
def _fast_link_helper(target, source, env, link_com):
    """fast link helper function.

    Performs the link into a temp file on linking_tmp_dir, then moves the
    result over the real target file when the link succeeds.
    """
    target_file = str(target[0])
    prefix_str = 'blade_%s' % target_file.replace('/', '_').replace('.', '_')
    fd, temporary_file = tempfile.mkstemp(suffix='xianxian',
                                          prefix=prefix_str,
                                          dir=linking_tmp_dir)
    os.close(fd)
    source_paths = [str(item) for item in source]
    link_com_str = link_com.substitute(FL_TARGET=temporary_file,
                                       FL_SOURCE=' '.join(source_paths))
    proc = subprocess.Popen(link_com_str,
                            env=os.environ,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=True,
                            universal_newlines=True)
    std_out, std_err = proc.communicate()
    if std_out:
        print(std_out)
    if std_err:
        print(std_err)
    if proc.returncode != 0:
        _blade_action_postfunc('failed while fast linking')
    else:
        shutil.move(temporary_file, target_file)
        if not os.path.exists(target_file):
            console.warning('failed to genreate %s in link on tmpfs mode' % target_file)
    return proc.returncode
def __init__(self, blade_root_dir, distcc_hosts_list=None):
    """Probe ccache/distcc/dccc availability and prepare their env flags.

    Args:
        blade_root_dir: root directory of the blade workspace.
        distcc_hosts_list: optional explicit distcc host list; falls back
            to the DISTCC_HOSTS environment variable.  (Default changed
            from a shared mutable [] to None — behavior is unchanged
            since both are falsy.)
    """
    # ccache
    self.blade_root_dir = blade_root_dir
    self.ccache_installed = self._check_ccache_install()

    # distcc
    self.distcc_env_prepared = False
    self.distcc_installed = self._check_distcc_install()
    if distcc_hosts_list:
        self.distcc_host_list = distcc_hosts_list
    else:
        self.distcc_host_list = os.environ.get('DISTCC_HOSTS', '')
    if self.distcc_installed:
        if self.distcc_host_list:
            self.distcc_env_prepared = True
        else:
            console.warning("DISTCC_HOSTS not set but you have "
                            "distcc installed, will just build locally")
    self.distcc_log_file = os.environ.get('DISTCC_LOG', '')
    if self.distcc_log_file:
        console.info("distcc log: %s" % self.distcc_log_file)

    # dccc (distributed linking)
    self.dccc_env_prepared = True
    self.dccc_master = os.environ.get('MASTER_HOSTS', '')
    self.dccc_hosts_list = os.environ.get('DISTLD_HOSTS', '')
    self.dccc_installed = self._check_dccc_install()
    if self.dccc_installed:
        if not self.dccc_master and not self.dccc_hosts_list:
            self.dccc_env_prepared = False
            console.warning("MASTER_HOSTS and DISTLD_HOSTS not set "
                            "but you have dccc installed, will just build locally")
    else:
        self.dccc_env_prepared = False

    self.rules_buf = []
def _check_srcs(self):
    """Check source files.

    Warns if one file belongs to two different targets, unless both
    targets' types are in the duplicate-allowed list.
    """
    allow_dup_src_type_list = ["cc_binary", "cc_test"]
    for s in self.srcs:
        if ".." in s or s.startswith("/"):
            # New-style raise: valid in python 2 and 3 (the old
            # `raise Exception, (...)` form is python-2-only).
            raise Exception(
                "Invalid source file path: %s. "
                "can only be relative path, and must in current directory or "
                "subdirectories" % s
            )
        src_key = os.path.normpath("%s/%s" % (self.path, s))
        src_value = "%s %s:%s" % (self.type, self.path, self.name)
        if src_key in Target.__src_target_map:
            value_existed = Target.__src_target_map[src_key]
            # May insert multiple time in test because of not unloading module
            # src_value is "<type> <path>:<name>"; its type is the token
            # before the first space.  The original split(": ") could never
            # match that format, silently disabling the allow list.
            if value_existed != src_value and not (
                value_existed.split(" ")[0] in allow_dup_src_type_list
                and self.type in allow_dup_src_type_list
            ):
                # Just warn here, not raising exception
                console.warning(
                    "Source file %s belongs to both %s and %s"
                    % (s, value_existed, src_value)
                )
        Target.__src_target_map[src_key] = src_value
def _get_opened_files(targets, blade_root_dir, working_dir):
    """Return the set of locally modified/added C/C++ sources and headers
    (absolute paths) in the directories of the given targets.

    Supports svn and git working copies; warns for anything else.
    """
    checked_dirs = set()
    opened_files = set()
    blade_root_dir = os.path.normpath(blade_root_dir)
    for target in targets:
        target = _normalize_target_path(target)
        d = os.path.dirname(target)
        # Bug fix: the original `return`ed here, aborting the whole scan
        # (and returning None) as soon as a directory repeated; only the
        # already-checked directory should be skipped.
        if d in checked_dirs:
            continue
        checked_dirs.add(d)
        output = []
        if is_svn_client(blade_root_dir):
            full_target = os.path.normpath(os.path.join(working_dir, d))
            top_dir = full_target[len(blade_root_dir) + 1:]
            output = os.popen('svn st %s' % top_dir).read().split('\n')
        else:
            (is_git, git_root, git_subdir) = is_git_client(
                blade_root_dir, target, working_dir)
            if is_git:
                os.chdir(git_root)
                status_cmd = 'git status --porcelain %s' % (git_subdir)
                output = os.popen(status_cmd).read().split('\n')
            else:
                console.warning('unknown source client type, NOT svn OR git')
        for line in output:
            seg = line.strip().split(' ')
            # Only modified (M) or added (A) entries are of interest.
            if seg[0] != 'M' and seg[0] != 'A':
                continue
            f = seg[-1]
            if f.endswith(('.h', '.hpp', '.cc', '.cpp')):
                fullpath = os.path.join(os.getcwd(), f)
                opened_files.add(fullpath)
    return opened_files
def _generate_java_coverage_report(self):
    """Generate the java coverage report.

    Uses jacoco plus the reporter configured in java_test_config; skips
    with a warning when either is not configured.
    """
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    coverage_reporter = java_test_config['coverage_reporter']
    if not (jacoco_home and coverage_reporter):
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if not coverage_data:
        return
    parts = ['java -classpath %s:%s com.tencent.gdt.blade.ReportGenerator' %
             (coverage_reporter, jacoco_libs), report_dir]
    parts.extend(','.join(data) for data in coverage_data)
    cmd_str = ' '.join(parts)
    console.info('Generating java coverage report')
    console.info(cmd_str)
    if subprocess.call(cmd_str, shell=True):
        console.warning('Failed to generate java coverage report')
def _download_dependency(self, id, classifier):
    """Resolve and cache the runtime dependency classpath of a maven id.

    Returns True when the cached classpath is still valid or resolution
    succeeded, False otherwise.
    """
    group, artifact, version = id.split(':')
    target_path = self._generate_jar_path(id)
    log = os.path.join(target_path, artifact + '__classpath.log')
    classpath = 'classpath.txt'
    if os.path.isfile(os.path.join(target_path, classpath)):
        # A release classpath never changes; a SNAPSHOT one is refreshed
        # once its log has expired.
        if not version.endswith('-SNAPSHOT'):
            return True
        if os.path.isfile(log) and not self._is_log_expired(log):
            return True
    # NOTE: analyzing dependencies of a classifier jar used to be attempted
    # here but usually failed, so that code path remains disabled.
    console.info('Downloading %s dependencies...' % id)
    pom = os.path.join(target_path, artifact + '-' + version + '.pom')
    cmd = ' '.join([self.__maven, 'dependency:build-classpath',
                    '-DincludeScope=runtime',
                    '-Dmdep.outputFile=%s' % classpath])
    cmd += ' -f %s > %s' % (pom, log)
    if subprocess.call(cmd, shell=True):
        console.warning('Error occurred when resolving %s dependencies. '
                        'Check %s for more details.' % (id, log))
        return False
    return True
def ninja_generate_resources(self):
    """Emit the ninja build for regular and location resources.

    Returns the list of output paths under the target's .resources
    directory, or [] when there is nothing to do.
    """
    regular = self.data['resources']
    locations = self.data['location_resources']
    if not (regular or locations):
        return []
    resources_dir = self._target_file_path() + '.resources'
    inputs = []
    outputs = []
    for src, dst in self._process_regular_resources(regular):
        inputs.append(src)
        outputs.append(os.path.join(resources_dir, dst))
    targets = self.blade.get_build_targets()
    for key, type, dst in locations:
        path = targets[key]._get_target_file(type)
        if not path:
            console.warning('%s: Location %s %s is missing. Ignored.' %
                            (self.fullname, key, type))
            continue
        inputs.append(path)
        # Default the destination name to the produced file's basename.
        outputs.append(os.path.join(resources_dir,
                                    dst or os.path.basename(path)))
    if inputs:
        self.ninja_build(outputs, 'javaresource', inputs=inputs)
    return outputs
def _check_srcs(self):
    """Check source files.

    Per the configured duplicated_source_action, errors or warns when one
    file belongs to two different targets, unless both targets' types are
    in the duplicate-allowed list.
    """
    config = configparse.blade_config.get_config('global_config')
    action = config.get('duplicated_source_action')
    allow_dup_src_type_list = ['cc_binary', 'cc_test']
    for s in self.srcs:
        if '..' in s or s.startswith('/'):
            console.error_exit('%s:%s Invalid source file path: %s. '
                               'can only be relative path, and must in current directory '
                               'or subdirectories' % (self.path, self.name, s))
        src_key = os.path.normpath('%s/%s' % (self.path, s))
        src_value = '%s %s:%s' % (self.type, self.path, self.name)
        if src_key in Target.__src_target_map:
            value_existed = Target.__src_target_map[src_key]
            # May insert multiple time in test because of not unloading module
            # src_value is '<type> <path>:<name>'; its type is the token
            # before the first space.  The original split(': ') could never
            # match that format, silently disabling the allow list.
            if (value_existed != src_value and
                    not (value_existed.split(' ')[0] in allow_dup_src_type_list and
                         self.type in allow_dup_src_type_list)):
                message = 'Source file %s belongs to both %s and %s' % (
                    s, value_existed, src_value)
                if action == 'error':
                    console.error_exit(message)
                elif action == 'warning':
                    console.warning(message)
                elif action == 'none' or not action:
                    pass
        Target.__src_target_map[src_key] = src_value
def _check_code_style(targets):
    """Run cpplint over changed files of the targets and warn on issues.

    Returns 0 on the normal path, 1 if interrupted by the user.
    """
    cpplint = config.get_item('cc_config', 'cpplint')
    if not cpplint:
        console.info('cpplint disabled')
        return 0
    changed_files = _get_changed_files(targets, _BLADE_ROOT_DIR, _WORKING_DIR)
    if not changed_files:
        return 0
    console.info('Begin to check code style for changed source code')
    p = subprocess.Popen('%s %s' % (cpplint, ' '.join(changed_files)),
                         shell=True)
    try:
        p.wait()
        if p.returncode != 0:
            # 127 means the shell could not find the cpplint executable.
            if p.returncode == 127:
                msg = ("Can't execute '{0}' to check style, you can config the "
                       "'cpplint' option to be a valid cpplint path in the "
                       "'cc_config' section of blade.conf or BLADE_ROOT, or "
                       "make sure '{0}' command is correct.").format(cpplint)
            else:
                msg = 'Please fixing style warnings before submitting the code!'
            console.warning(msg)
    except KeyboardInterrupt as e:
        console.error(str(e))
        return 1
def gen_targets_rules(self):
    """Get the build rules and return to the object who queries this.

    Walks targets in sorted order, skipping non-scons types and (per the
    options) tests/packages that were not requested directly.
    """
    rules_buf = []
    skip_test = getattr(self.__options, 'no_test', False)
    skip_package = not getattr(self.__options, 'generate_package', False)
    for key in self.__sorted_targets_keys:
        target = self.__build_targets[key]
        if not self._is_scons_object_type(target.type):
            continue
        scons_object = self.__target_database.get(key, None)
        if not scons_object:
            console.warning('not registered scons object, key %s' % str(key))
            continue
        indirect = key not in self.__direct_targets
        if indirect and skip_test and target.type.endswith('_test'):
            continue
        if indirect and skip_package and target.type == 'package':
            continue
        scons_object.scons_rules()
        rules = scons_object.get_rules()
        if rules:
            rules_buf.append('\n')
            rules_buf += rules
    return rules_buf
def _check_code_style(opened_files):
    """Run cpplint over the opened files and warn about style problems.

    Returns 0 on the normal path, 1 if interrupted by the user.
    """
    if not opened_files:
        return 0
    cpplint = configparse.blade_config.configs["cc_config"]["cpplint"]
    if not cpplint:
        console.info("cpplint disabled")
        return 0
    console.info("Begin to check code style for changed source code")
    p = subprocess.Popen("%s %s" % (cpplint, " ".join(opened_files)),
                         shell=True)
    try:
        p.wait()
        if p.returncode:
            # 127 means the shell could not find the cpplint executable.
            if p.returncode == 127:
                msg = ("Can't execute '{0}' to check style, you can config the "
                       "'cpplint' option to be a valid cpplint path in the "
                       "'cc_config' section of blade.conf or BLADE_ROOT, or "
                       "make sure '{0}' command is correct.").format(cpplint)
            else:
                msg = "Please fixing style warnings before submitting the code!"
            console.warning(msg)
    except KeyboardInterrupt as e:
        console.error(str(e))
        return 1
def cc_plugin(name, srcs=[], deps=[], warning='yes', defs=[], incs=[],
              export_incs=[], optimize=[], prebuilt=False, pre_build=False,
              extra_cppflags=[], extra_linkflags=[], **kwargs):
    """cc_plugin target.

    Defines a cc_plugin and registers it with blade; 'pre_build' is a
    deprecated alias of 'prebuilt'.
    """
    target = CcPlugin(name, srcs, deps, warning, defs, incs, export_incs,
                      optimize, prebuilt or pre_build, extra_cppflags,
                      extra_linkflags, blade.blade, kwargs)
    if pre_build:
        console.warning("//%s:%s: 'pre_build' has been deprecated, "
                        "please use 'prebuilt'" % (target.path, target.name))
    blade.blade.register_target(target)
def SlopeReclass(bassin, zone, workdir, overwrite):
    """Reclassify slopes into 4 classes.

    Smooths the 5 m DEM with a 5x5 mean filter, derives the slope from it,
    then reclassifies the slope (class boundaries at 2, 6 and 12) into
    SLOPE_CLS.tif next to the inputs.

    Args:
        bassin: basin name (directory level under workdir).
        zone: zone name (directory level under the basin).
        workdir: root working directory.
        overwrite: when False, do nothing if the output already exists.
    """
    from qgis_helper import execute
    from processing.tools.system import getTempDirInTempFolder

    dem = os.path.join(workdir, bassin, zone, 'DEM5M.tif')
    # slope = os.path.join(workdir, bassin, zone, 'SLOPE.tif')
    output = os.path.join(workdir, bassin, zone, 'SLOPE_CLS.tif')

    if os.path.exists(output) and not overwrite:
        important('Output already exists : %s' % output)
        return

    info('Smooth DEM using 5x5 mean filter')
    parameters = dict(
        input=dem,
        bands=[1],
        filter_type=0,
        size=5,
        output=os.path.join(getTempDirInTempFolder(), 'SMOOTHED.tif'))
    result = execute("fct:simplerasterfilter", **parameters)
    if 'OUTPUT' not in result:
        warning('Error :(')
        return

    info('Calculate slope')
    parameters = dict(
        input=result['OUTPUT'],
        z_factor=1,
        output=os.path.join(getTempDirInTempFolder(), 'SLOPE.tif'))
    result = execute("qgis:slope", **parameters)
    if 'OUTPUT' not in result:
        warning('Error :(')
        return

    info('Reclass slopes')
    # Table entries come in (min, max, class) triples mapping to classes
    # 1..4; NOTE(review): the exact boundary inclusion depends on
    # range_boundaries=1 — confirm against the QGIS
    # 'native:reclassifybytable' documentation.
    parameters = dict(
        input_raster=result['OUTPUT'],
        raster_band=1,
        table=[0, 2, 1, 2, 6, 2, 6, 12, 3, 12, None, 4],
        no_data=0,
        range_boundaries=1,
        nodata_for_missing=True,
        data_type=0,
        output=output)
    result = execute('native:reclassifybytable', **parameters)
    if 'OUTPUT' in result:
        success('Saved to %s' % result['OUTPUT'])
def cc_config(append=None, **kwargs):
    """extra cc config, like extra cpp include path splited by space.

    A legacy space-separated extra_incs string is converted to a list
    with a deprecation warning.
    """
    extra_incs = kwargs.get("extra_incs")
    if isinstance(extra_incs, basestring) and " " in extra_incs:
        console.warning("%s: cc_config: extra_incs has been changed to list" %
                        blade_config.current_file_name)
        kwargs["extra_incs"] = extra_incs.split()
    blade_config.update_config("cc_config", append, kwargs)
def _check_inctest_md5sum_file(self):
    """check the md5sum file size, remove it when it is too large.

    The size limit is 2GiB.
    """
    md5_file = self.inctest_md5_file
    if (os.path.exists(md5_file) and
            os.path.getsize(md5_file) > 2 * 1024 * 1024 * 1024):
        console.warning("Will remove the md5sum file for incremental "
                        "test for it is oversized")
        os.remove(md5_file)
def _check_test_options(self):
    """check that test command options.

    Folds the deprecated --testargs option into self.options.args.
    """
    testargs = self.options.testargs
    if not testargs:
        return
    console.warning('--testargs has been deprecated, please put all test'
                    ' arguments after a "--" ')
    self.options.args = shlex.split(testargs) + self.options.args
def _show_slow_tests(self, passed_run_results, failed_run_results):
    """Warn about tests whose run time exceeded the slow threshold."""
    slow_tests = (self._collect_slow_tests(passed_run_results) +
                  self._collect_slow_tests(failed_run_results))
    if not slow_tests:
        return
    console.warning('%d slow tests:' % len(slow_tests))
    # Sorted by cost so the slowest appear last.
    for cost_time, key in sorted(slow_tests):
        console.warning('%.4gs\t//%s:%s' % (cost_time, key[0], key[1]),
                        prefix=False)
def java_jar(name, srcs=[], deps=[], prebuilt=False, pre_build=False,
             **kwargs):
    """Define java_jar target.

    'pre_build' is a deprecated alias of 'prebuilt'.
    """
    target = JavaJarTarget(name, srcs, deps, prebuilt or pre_build,
                           blade.blade, kwargs)
    if pre_build:
        console.warning('//%s:%s: "pre_build" has been deprecated, '
                        'please use "prebuilt"' % (target.path, target.name))
    blade.blade.register_target(target)
def fileSetup():
    """Create the secure/data folder layout and all expected JSON files.

    Existing JSON files with unparsable content are reset to an empty
    template; expected files that are missing are created.
    """
    for folder in ("secure", "secure/files", "data"):
        if not os.path.isdir(folder):
            os.mkdir(folder)
            console.info("Folder '%s' created" % folder)
    allFiles = ["secure/logins.json", "secure/computers.json",
                "secure/emails.json", "secure/creditcards.json",
                "secure/notes.json"]
    existing = []
    for name in os.listdir('secure'):
        path = "secure/" + name
        if not os.path.isfile(path):
            continue
        existing.append(path)
        with open(path, "r") as f:
            content = f.read()
        console.info("Checking file " + path)
        try:
            json.loads(content)
        except Exception:
            # Unparsable content: reset the file to an empty template.
            console.warning("Could not load file " + path)
            with open(path, "w") as f:
                toFile = {name.replace('.json', ''): []}
                f.write(json.dumps(toFile))
            console.info("File " + path + " created")
    # Bug fix: the original removed entries from allFiles while iterating
    # over it; compute the missing files without mutating the list.
    for missingFile in allFiles:
        if missingFile in existing:
            continue
        console.warning("Could not find file " + missingFile)
        with open(missingFile, "w") as f:
            toFile = {os.path.basename(missingFile).replace('.json', ''): []}
            f.write(json.dumps(toFile))
        console.info("File " + missingFile + " created")
def java_jar(name, srcs=[], deps=[], prebuilt=False, pre_build=False,
             **kwargs):
    """scons_java_jar.

    Defines a java_jar target and registers it with the scons backend;
    'pre_build' is a deprecated alias of 'prebuilt'.
    """
    target = JavaJarTarget(name, srcs, deps, prebuilt or pre_build,
                           blade.blade, kwargs)
    if pre_build:
        console.warning("//%s:%s: 'pre_build' has been deprecated, "
                        "please use 'prebuilt'" % (target.data["path"],
                                                   target.data["name"]))
    blade.blade.register_scons_target(target.key, target)
def cc_config(append=None, **kwargs):
    """extra cc config, like extra cpp include path splited by space.

    A legacy space-separated extra_incs string is converted to a list
    with a deprecation warning.
    """
    extra_incs = kwargs.get('extra_incs')
    if isinstance(extra_incs, basestring) and ' ' in extra_incs:
        console.warning('%s: cc_config: extra_incs has been changed to list' %
                        blade_config.current_file_name)
        kwargs['extra_incs'] = extra_incs.split()
    blade_config.update_config('cc_config', append, kwargs)
def _check_inctest_md5sum_file(self):
    """check the md5sum file size, remove it when it is too large.

    The size limit is 2GiB.
    """
    md5_file = self.inctest_md5_file
    if (os.path.exists(md5_file) and
            os.path.getsize(md5_file) > 2 * 1024 * 1024 * 1024):
        console.warning('Will remove the md5sum file for incremental '
                        'test for it is oversized')
        os.remove(md5_file)
def _prepare_test_data(self, target):
    """Prepare the testdata symlinks under the target's runfiles dir.

    Each testdata entry is either a single path or a (source, link_name)
    tuple.  Ambiguous or incompatible link names abort the build; already
    existing valid links are kept (with a warning), broken symlinks are
    removed and re-created.
    """
    if 'testdata' not in target['options']:
        return
    link_name_list = []
    for i in target['options']['testdata']:
        # An entry is either (data_target, link_name) or one path used
        # for both roles.
        if isinstance(i, tuple):
            data_target = i[0]
            link_name = i[1]
        else:
            data_target = link_name = i
        # Silently skip paths that try to escape the source tree.
        if '..' in data_target:
            continue
        if link_name.startswith('//'):
            link_name = link_name[2:]
        err_msg, item = self.__check_link_name(link_name, link_name_list)
        if err_msg == "AMBIGUOUS":
            console.error_exit(
                "Ambiguous testdata of //%s:%s: %s, exit..." % (
                    target['path'], target['name'], link_name))
        elif err_msg == "INCOMPATIBLE":
            console.error_exit(
                "%s could not exist with %s in testdata of //%s:%s" % (
                    link_name, item, target['path'], target['name']))
        link_name_list.append(link_name)
        try:
            # Create intermediate directories for the link; they may
            # already exist, which raises OSError and is fine.
            os.makedirs(
                os.path.dirname('%s/%s' % (self._runfiles_dir(target),
                                           link_name)))
        except OSError:
            pass
        symlink_name = os.path.abspath(
            '%s/%s' % (self._runfiles_dir(target), link_name))
        symlink_valid = False
        if os.path.lexists(symlink_name):
            if os.path.exists(symlink_name):
                # A live link/file already occupies the slot; keep it.
                symlink_valid = True
                console.warning(
                    "%s already existed, could not prepare "
                    "testdata for //%s:%s" % (
                        link_name, target['path'], target['name']))
            else:
                # Dangling symlink: remove it and re-create below.
                os.remove(symlink_name)
                console.warning("%s already existed, but it is a broken "
                                "symbolic link, blade will remove it and "
                                "make a new one." % link_name)
        if data_target.startswith('//'):
            data_target = data_target[2:]
            dest_data_file = os.path.abspath(data_target)
        else:
            dest_data_file = os.path.abspath(
                "%s/%s" % (target['path'], data_target))
        if not symlink_valid:
            os.symlink(dest_data_file,
                       '%s/%s' % (self._runfiles_dir(target), link_name))
def cc_config(append=None, **kwargs):
    """extra cc config, like extra cpp include path splited by space.

    A legacy space-separated extra_incs string is converted to a list
    with a deprecation warning.  (The `global blade_config` statement was
    dropped: the name is only read here, never rebound.)
    """
    extra_incs = kwargs.get('extra_incs')
    if isinstance(extra_incs, basestring) and ' ' in extra_incs:
        console.warning('%s: cc_config: extra_incs has been changed to list' %
                        blade_config.current_file_name)
        kwargs['extra_incs'] = extra_incs.split()
    blade_config.update_config('cc_config', append, kwargs)
def ninja_rules(self):
    """Emit the ninja build edge for the scala test binary."""
    if not self.srcs:
        console.warning('%s: Empty scala test sources.' % self.fullname)
        return
    jar = self.ninja_generate_jar()
    dep_jars, maven_jars = self._get_test_deps()
    self.ninja_build(self._target_file_path(), 'scalatest',
                     inputs=[jar] + dep_jars + maven_jars)
def __init__(self, name, srcs, deps, resources, source_encoding, warnings,
             testdata, kwargs):
    """Construct a scala_test target on top of ScalaFatLibrary."""
    ScalaFatLibrary.__init__(self, name, srcs, deps, resources,
                             source_encoding, warnings, [], kwargs)
    self.type = 'scala_test'
    self.data['testdata'] = var_to_list(testdata)
    scalatest_libs = config.get_item('scala_test_config', 'scalatest_libs')
    if not scalatest_libs:
        console.warning('scalatest jar was not configured')
    else:
        self._add_hardcode_java_library(scalatest_libs)
def generate_fat_jar(target, jars):
    """Generate a fat jar containing the contents of all the jar dependencies.

    Args:
        target: path of the fat jar to create (its directory is created
            on demand).
        jars: paths of the dependency jars to merge.

    Duplicate entry names keep the first jar's content; conflicts not
    coming from the local maven repository are collected and reported.
    """
    target_dir = os.path.dirname(target)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    target_fat_jar = zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED)
    # Record paths written in the fat jar to avoid duplicate writing
    path_jar_dict = {}
    conflict_logs = []
    for dep_jar in jars:
        jar = zipfile.ZipFile(dep_jar, 'r')
        name_list = jar.namelist()
        for name in name_list:
            if name.endswith('/') or not _is_fat_jar_excluded(name):
                if name not in path_jar_dict:
                    target_fat_jar.writestr(name, jar.read(name))
                    path_jar_dict[name] = os.path.basename(dep_jar)
                else:
                    # Directory entries may legitimately repeat.
                    if name.endswith('/'):
                        continue
                    message = ('%s: duplicated path %s found in {%s, %s}' % (
                        target, name, path_jar_dict[name],
                        os.path.basename(dep_jar)))
                    # Always log all conflicts for diagnosis
                    if console_logging:
                        console.debug(message)
                    if '/.m2/repository/' not in dep_jar:
                        # There are too many conflicts between maven jars,
                        # so we have to ignore them, only count source code conflicts
                        conflict_logs.append(message)
        jar.close()
    if conflict_logs:
        log = '%s: Found %d conflicts when packaging.' % (
            target, len(conflict_logs))
        if console_logging:
            console.warning(log)
        else:
            print >> sys.stdout, log
            print >> sys.stderr, '\n'.join(conflict_logs)
    # TODO(wentingli): Create manifest from dependency jars later if needed
    contents = [
        'Manifest-Version: 1.0',
        'Created-By: Python.Zipfile (Blade)',
        'Built-By: %s' % os.getenv('USER'),
        'Build-Time: %s' % time.asctime(),
    ]
    # NOTE(review): _manifest_scm presumably returns SCM info lines for
    # the workspace root directory — confirm in its definition.
    contents += _manifest_scm(target.split(os.sep)[0])
    contents.append('\n')
    target_fat_jar.writestr(_JAR_MANIFEST, '\n'.join(contents))
    target_fat_jar.close()
def _check_deprecated_deps(self):
    """Warn when this target depends upon a deprecated library.

    For each deprecated dependency, its first replacement dep (if any)
    is suggested.
    """
    for key in self.deps:
        dep = self.target_database.get(key)
        if not (dep and dep.data.get('deprecated')):
            continue
        replaced_deps = dep.deps
        if replaced_deps:
            console.warning('%s: //%s has been deprecated, '
                            'please depends on //%s:%s' % (
                                self.fullname, dep.fullname,
                                replaced_deps[0][0], replaced_deps[0][1]))
def _replace_config(self, section_name, config, user_config):
    """Replace config section items.

    Known keys in user_config overwrite config (list-typed items are
    normalized via var_to_list); unknown keys are warned about and
    dropped before the update.
    """
    # Iterate over a snapshot of the keys since entries may be deleted
    # inside the loop (required on python 3, where .keys() is a live view).
    for k in list(user_config.keys()):
        if k in config:
            if isinstance(config[k], list):
                user_config[k] = var_to_list(user_config[k])
            # Non-list values are kept as provided (the original had a
            # redundant `user_config[k] = user_config[k]` here).
        else:
            console.warning("%s: %s: unknown config item name: %s" %
                            (self.current_file_name, section_name, k))
            del user_config[k]
    config.update(user_config)
def __init__(self, name, srcs, deps, resources, source_encoding, warnings,
             testdata, kwargs):
    """Construct a scala_test target on top of ScalaFatLibrary."""
    ScalaFatLibrary.__init__(self, name, srcs, deps, resources,
                             source_encoding, warnings, [], kwargs)
    self.type = 'scala_test'
    self.data['testdata'] = var_to_list(testdata)
    config = configparse.blade_config.get_config('scala_test_config')
    scalatest_libs = config['scalatest_libs']
    if not scalatest_libs:
        console.warning('scalatest jar was not configured')
    else:
        self._add_hardcode_java_library(scalatest_libs)
def _check_run_targets(self):
    """check that run command should have only one target.

    Also folds the deprecated --runargs option into self.options.args.
    """
    if not self.targets or ':' not in self.targets[0]:
        console.error_exit('Please specify a single target to run: '
                           'blade run //target_path:target_name (or '
                           'a_path:target_name)')
    if len(self.targets) > 1:
        console.warning('run command will only take one target to build and run')
    runargs = self.options.runargs
    if runargs:
        console.warning('--runargs has been deprecated, please put all run'
                        ' arguments after a "--"')
        self.options.args = shlex.split(runargs) + self.options.args
def generate_fat_jar(target, jars):
    """Merge every jar in *jars* into a single fat jar at *target*.

    Duplicate entry paths are written only once (the first jar providing
    a path wins); conflicts outside the local maven repository are
    collected and recorded as metadata.  A minimal MANIFEST is written
    last.
    """
    out_dir = os.path.dirname(target)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    fat_jar = zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED)
    # Maps each entry path already written to the jar that provided it
    written = {}
    conflicts = []
    for dep_jar in jars:
        source = zipfile.ZipFile(dep_jar, 'r')
        for entry in source.namelist():
            # Excluded file entries are skipped; directories always pass
            if not entry.endswith('/') and _is_fat_jar_excluded(entry):
                continue
            if entry not in written:
                fat_jar.writestr(entry, source.read(entry))
                written[entry] = dep_jar
                continue
            # Duplicate directory entries are harmless
            if entry.endswith('/'):
                continue
            message = ('%s: duplicate path %s found in {%s, %s}' % (
                target, entry, os.path.basename(written[entry]),
                os.path.basename(dep_jar)))
            # Always log all conflicts for diagnosis
            console.debug(message)
            if '/.m2/repository/' not in dep_jar:
                # There are too many conflicts between maven jars,
                # so we have to ignore them, only count source code conflicts
                conflicts.append('\n'.join([
                    'Path: %s' % entry,
                    'From: %s' % written[entry],
                    'Ignored: %s' % dep_jar,
                ]))
        source.close()
    if conflicts:
        console.warning('%s: Found %d conflicts when packaging.' % (
            target, len(conflicts)))
        generate_fat_jar_metadata(fat_jar, jars, conflicts)
    # TODO(wentingli): Create manifest from dependency jars later if needed
    manifest = [
        'Manifest-Version: 1.0',
        'Created-By: Python.Zipfile (Blade)',
        'Built-By: %s' % os.getenv('USER'),
        'Build-Time: %s' % time.asctime(),
    ]
    manifest += _manifest_scm(target.split(os.sep)[0])
    manifest.append('\n')
    fat_jar.writestr(_JAR_MANIFEST, '\n'.join(manifest))
    fat_jar.close()
def _load_test_history(self):
    """Load the incremental test history file into self.test_history.

    On a parse failure a warning is emitted and a full test run is
    implied; an 'items' entry is guaranteed to exist afterwards.
    """
    if os.path.exists(_TEST_HISTORY_FILE):
        try:
            with open(_TEST_HISTORY_FILE) as f:
                # NOTE(review): eval of a state file — acceptable only
                # because the file is written by blade itself; do not
                # point it at untrusted input.
                # pylint: disable=eval-used
                self.test_history = eval(f.read())
        except (IOError, SyntaxError, NameError, TypeError):
            console.warning(
                'error loading incremental test history, will run full test'
            )
    if 'items' not in self.test_history:
        self.test_history['items'] = {}
def java_jar(name, srcs=None, deps=None, prebuilt=False, pre_build=False,
             **kwargs):
    """Define java_jar target.

    Args:
        name: target name.
        srcs: list of source files (defaults to empty).
        deps: list of dependencies (defaults to empty).
        prebuilt: whether the jar is prebuilt.
        pre_build: deprecated alias for prebuilt; using it emits a warning.

    Fix: the original used mutable default arguments (srcs=[], deps=[]),
    which are shared across all calls; None sentinels are backward
    compatible and avoid accidental cross-call mutation.
    """
    srcs = [] if srcs is None else srcs
    deps = [] if deps is None else deps
    target = JavaJarTarget(name,
                           srcs,
                           deps,
                           prebuilt or pre_build,
                           blade.blade,
                           kwargs)
    if pre_build:
        console.warning('//%s:%s: "pre_build" has been deprecated, '
                        'please use "prebuilt"' % (target.path, target.name))
    blade.blade.register_target(target)
def _check_defs(self): """_check_defs. It will warn if user defines cpp keyword in defs list. """ defs_list = self.data.get('defs', []) for macro in defs_list: pos = macro.find('=') if pos != -1: macro = macro[0:pos] if macro in CcTarget.__cxx_keyword_list: console.warning('DO NOT define c++ keyword %s as macro' % macro)
def _prepare_test_data(self, target):
    """Symlink each entry of target.data['testdata'] into the runfiles dir.

    Each testdata item is either a plain path string or a
    (data_target, link_name) tuple.  Link names are validated against
    each other, parent directories are created, pre-existing valid
    symlinks are kept (with a warning) and broken ones are replaced.
    """
    if 'testdata' not in target.data:
        return
    link_name_list = []
    for i in target.data['testdata']:
        if isinstance(i, tuple):
            data_target = i[0]
            link_name = i[1]
        else:
            data_target = link_name = i
        # Silently skip paths that try to escape the source tree
        if '..' in data_target:
            continue
        if link_name.startswith('//'):
            link_name = link_name[2:]
        err_msg, item = self.__check_link_name(link_name, link_name_list)
        if err_msg == 'AMBIGUOUS':
            console.error_exit('Ambiguous testdata of //%s:%s: %s, exit...' % (
                target.path, target.name, link_name))
        elif err_msg == 'INCOMPATIBLE':
            console.error_exit('%s could not exist with %s in testdata of //%s:%s' % (
                link_name, item, target.path, target.name))
        link_name_list.append(link_name)
        try:
            # Best-effort creation of intermediate directories;
            # OSError (e.g. already exists) is deliberately ignored
            os.makedirs(os.path.dirname('%s/%s' % (
                self._runfiles_dir(target), link_name)))
        except OSError:
            pass
        symlink_name = os.path.abspath('%s/%s' % (
            self._runfiles_dir(target), link_name))
        symlink_valid = False
        if os.path.lexists(symlink_name):
            if os.path.exists(symlink_name):
                # Link exists and resolves: keep it, but warn the user
                symlink_valid = True
                console.warning('%s already existed, could not prepare '
                                'testdata for //%s:%s' % (
                                link_name, target.path, target.name))
            else:
                # Dangling symlink: remove so it can be recreated below
                os.remove(symlink_name)
                console.warning('%s already existed, but it is a broken '
                                'symbolic link, blade will remove it and '
                                'make a new one.' % link_name)
        if data_target.startswith('//'):
            # //-prefixed paths are relative to the workspace root
            data_target = data_target[2:]
            dest_data_file = os.path.abspath(data_target)
        else:
            dest_data_file = os.path.abspath('%s/%s' % (target.path, data_target))
        if not symlink_valid:
            os.symlink(dest_data_file,
                       '%s/%s' % (self._runfiles_dir(target), link_name))
def _exec_get_version_info(self, cmd, cwd, dirname):
    """Run a version-control command and record its escaped stdout.

    On success, stdout of *cmd* (run in *cwd*) is stored in
    self.svn_info_map[dirname]; on a non-zero exit a warning is emitted.

    Fix: the original did `lc_all_env = os.environ` and then mutated it,
    which sets LC_ALL=POSIX in the environment of the whole blade
    process and every other child it spawns.  Use a copy instead.
    """
    env = os.environ.copy()
    env['LC_ALL'] = 'POSIX'
    p = subprocess.Popen(cmd,
                         env=env,
                         cwd=cwd,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        console.warning('failed to get version control info in %s' % dirname)
    else:
        # Escape newlines — presumably so the text can be embedded in a
        # generated source string; TODO confirm against the consumer
        self.svn_info_map[dirname] = std_out.replace('\n', '\\n\\\n')
def _check_code_style(opened_files):
    """Run cpplint over the locally opened/modified files.

    Returns 0 when there is nothing to check or the check completed
    (style warnings only produce a console warning), 1 when interrupted.

    Fix: the original used a bare `except:` whose own comment said it was
    meant for KeyboardInterrupt only; catch exactly that, so real errors
    are no longer silently converted into a return code.
    """
    cpplint = configparse.blade_config.configs['cc_config']['cpplint']
    console.info("Begin to check code style for source code")
    if not opened_files:
        return 0
    p = subprocess.Popen(("%s %s" % (cpplint, ' '.join(opened_files))),
                         shell=True)
    try:
        p.wait()
        if p.returncode:
            msg = "Please try fixing style warnings in the opened files before submitting the code!"
            console.warning(msg)
    except KeyboardInterrupt:
        return 1
    return 0
def proto_library_config(append=None, **kwargs):
    """protoc config.

    Translates the legacy 'protobuf_include_path' item into the
    'protobuf_incs' list before updating the config section.
    """
    path = kwargs.get('protobuf_include_path')
    if path:
        console.warning(('%s: proto_library_config: protobuf_include_path has '
                         'been renamed to protobuf_incs, and become a list') %
                        blade_config.current_file_name)
        del kwargs['protobuf_include_path']
        # A space-separated string becomes multiple entries
        incs = [path]
        if isinstance(path, basestring) and ' ' in path:
            incs = path.split()
        kwargs['protobuf_incs'] = incs
    blade_config.update_config('proto_library_config', append, kwargs)
def _check_deprecated_deps(self): """check that whether it depends upon a deprecated library. """ for dep in self.deps: target = self.target_database.get(dep, {}) if target.data.get('deprecated'): replaced_targets = target.deps replaced_target = '' if replaced_targets: replaced_target = replaced_targets[0] console.warning('//%s:%s : ' '//%s:%s has been deprecated, ' 'please depends on //%s:%s' % ( self.path, self.name, target.path, target.name, replaced_target[0], replaced_target[1]))
def proto_library_config(append=None, **kwargs):
    """protoc config.

    Rewrites the deprecated 'protobuf_include_path' item as the
    'protobuf_incs' list, then updates the config section.
    """
    path = kwargs.get("protobuf_include_path")
    if path:
        console.warning(
            ("%s: proto_library_config: protobuf_include_path has been "
             "renamed to protobuf_incs, and become a list")
            % blade_config.current_file_name
        )
        del kwargs["protobuf_include_path"]
        kwargs["protobuf_incs"] = (
            path.split()
            if isinstance(path, basestring) and " " in path
            else [path]
        )
    blade_config.update_config("proto_library_config", append, kwargs)
def _download_jar(self, id):
    """Download the specified jar and its transitive dependencies.

    Runs maven's dependency:get to fetch group:artifact:version from the
    central repository, then dependency:build-classpath to resolve the
    transitive classpath.  On success stores
    (jar path, first classpath line) in self.__jar_database[id] and
    returns True; returns False on any maven failure.
    """
    group, artifact, version = id.split(":")
    artifact = artifact + "-" + version
    jar = artifact + ".jar"
    pom = artifact + ".pom"
    log = artifact + "__download.log"
    cmd = " ".join(
        [
            self.__maven,
            "dependency:get",
            "-DremoteRepositories=%s" % self.__central_repository,
            "-Dartifact=%s" % id,
            # Shell redirect: maven output goes into the log file
            "> %s" % log,
        ]
    )
    console.info("Downloading %s from central repository..." % jar)
    ret = subprocess.call(cmd, shell=True)
    if ret:
        console.warning(
            "Error occurred when downloading %s from central "
            "repository. Check %s for more details." % (jar, log)
        )
        return False
    path = self._generate_jar_path(id)
    # Keep the download log next to the downloaded artifacts
    os.rename(log, os.path.join(path, log))
    classpath = "classpath.txt"
    log = artifact + "__classpath.log"
    cmd = " ".join(
        [
            self.__maven,
            "dependency:build-classpath",
            "-Dmdep.outputFile=%s" % classpath,
            "-f %s" % os.path.join(path, pom),
            "> %s" % os.path.join(path, log),
        ]
    )
    console.info("Resolving %s dependencies..." % jar)
    ret = subprocess.call(cmd, shell=True)
    if ret:
        console.warning("Error occurred when resolving %s dependencies" % jar)
        return False
    classpath = os.path.join(path, classpath)
    with open(classpath) as f:
        # Read the first line
        self.__jar_database[id] = (os.path.join(path, jar), f.readline())
    return True
def _append_config(self, section_name, config, append): """Append config section items""" if not isinstance(append, dict): console.error("%s: %s: append must be a dict" % (self.current_file_name, section_name)) else: for k in append.keys(): if k in config: if isinstance(config[k], list): config[k] += var_to_list(append[k]) else: console.warning( "%s: %s: config item %s is not a list" % (self.current_file_name, section_name, k) ) else: console.warning("%s: %s: unknown config item name: %s" % (self.current_file_name, section_name, k))
def _replace_config(self, section_name, config, user_config): """Replace config section items""" unknown_keys = [] for k in user_config: if k in config: if isinstance(config[k], list): user_config[k] = var_to_list(user_config[k]) else: user_config[k] = user_config[k] else: console.warning('%s: %s: unknown config item name: %s' % (self.current_file_name, section_name, k)) unknown_keys.append(k) for k in unknown_keys: del user_config[k] config.update(user_config)
def _filter_out_invalid_flags(self, flag_list, language='c'):
    """Filter the unsupported compilation flags. """
    supported, unsupported = [], []
    probe = os.path.join(self.build_dir, 'test.o')
    for flag in var_to_list(flag_list):
        # Compile an empty program with the flag; exit status 0 means
        # the compiler accepts it
        cmd = ('echo "int main() { return 0; }" | '
               '%s -o %s -c -x %s %s - > /dev/null 2>&1 && rm -f %s' % (
               self.cc, probe, language, flag, probe))
        if subprocess.call(cmd, shell=True) == 0:
            supported.append(flag)
        else:
            unsupported.append(flag)
    if unsupported:
        console.warning('Unsupported C/C++ flags: %s' %
                        ', '.join(unsupported))
    return supported
def _set_pack_exclusions(self, exclusions):
    """Validate and record maven exclusions into self.data['exclusions'].

    Invalid maven ids and malformed wildcard ids are skipped with a
    warning.
    """
    self.data['exclusions'] = []
    for exclusion in var_to_list(exclusions):
        if not maven.is_valid_id(exclusion):
            console.warning('%s: Exclusions only support maven id '
                            'group:artifact:version. Ignore %s' % (
                            self.fullname, exclusion))
            continue
        if ('*' in exclusion and
                not self.__is_valid_maven_id_with_wildcards(exclusion)):
            console.warning('%s: Invalid maven id with wildcards %s. '
                            'Ignored. The valid id could be: '
                            'group:artifact:*, group:*:*, *:*:*' %
                            (self.fullname, exclusion))
            continue
        self.data['exclusions'].append(exclusion)