def dump(self, output_file_name):
    """Write every config section to the named file, preceded by a banner comment."""
    banner = '# This config file was generated by `blade dump --config --to-file=<FILENAME>`\n'
    with open(output_file_name, 'w') as out:
        print(banner, file=out)
        for section_name, section_value in iteritems(self.configs):
            self._dump_section(section_name, section_value, out)
def ninja_build(self, outputs, rule, inputs=None, implicit_deps=None, order_only_deps=None, variables=None, implicit_outputs=None):
    """Generate a ninja build statement with specified parameters.

    Args:
        outputs: str|list, explicit outputs of the build statement.
        rule: str, name of the ninja rule to invoke.
        inputs: str|list, explicit inputs ($in).
        implicit_deps: str|list, dependencies after '|' (rebuild triggers, not in $in).
        order_only_deps: str|list, dependencies after '||' (ordering only).
        variables: dict, per-build variable bindings written as indented lines.
        implicit_outputs: str|list, outputs after '|' on the output side.
    """
    outs = var_to_list(outputs)
    if implicit_outputs:
        outs.append('|')
        # Normalize with var_to_list: a bare string previously got spliced in
        # character-by-character (same for the two deps parameters below).
        outs += var_to_list(implicit_outputs)
    ins = []
    if inputs:
        ins = var_to_list(inputs)
    if implicit_deps:
        ins.append('|')
        ins += var_to_list(implicit_deps)
    if order_only_deps:
        ins.append('||')
        ins += var_to_list(order_only_deps)
    self._write_rule('build %s: %s %s' % (' '.join(outs), rule, ' '.join(ins)))
    if variables:
        assert isinstance(variables, dict)
        for name, v in iteritems(variables):
            # An empty value still needs an explicit assignment line.
            if v:
                self._write_rule(' %s = %s' % (name, v))
            else:
                self._write_rule(' %s =' % name)
def _merge_passed_run_results_to_history(self, run_results):
    """Record passed runs in history, noting tests that flipped from fail to pass."""
    items = self.test_history['items']
    for key, result in iteritems(run_results):
        previous = items.get(key)
        if previous and previous.result.exit_code != 0:
            # It was failing before and passes now: count it as repaired.
            self.repaired_tests.append(key)
        items[key] = TestHistoryItem(
            self.test_jobs[key], first_fail_time=0, fail_count=0, result=result)
def _show_run_results(self, run_results, is_error=False):
    """Print one summary line per executed test, ordered by cost time ascending."""
    rows = [
        (key, result.cost_time, self.test_jobs[key].reason, result.exit_code)
        for key, result in iteritems(run_results)
    ]
    rows.sort(key=lambda row: row[1])
    emit = console.error if is_error else console.info
    for key, cost, reason, exit_code in rows:
        emit('%s:%s triggered by %s, exit(%s), cost %.2f s' % (
            key[0], key[1], reason, exit_code, cost), prefix=False)
def _detect_maven_conflicted_deps(self, scope, dep_jars): """ Maven dependencies might have conflict: same group and artifact but different version. Select higher version by default unless a specific version of maven dependency is specified as a direct dependency of the target """ # pylint: disable=too-many-locals maven_jar_versions = {} # (group, artifact) -> versions maven_jars = {} # (group, artifact, version) -> jars maven_repo = '.m2/repository/' for jar in set(dep_jars): if maven_repo not in jar or not os.path.exists(jar): console.debug('%s: %s not found in local maven repository' % (self.fullname, jar)) continue parts = jar[jar.find(maven_repo) + len(maven_repo):].split('/') if len(parts) < 4: continue version, artifact, group = parts[-2], parts[-3], '.'.join( parts[:-3]) key = group, artifact if key in maven_jar_versions: if version not in maven_jar_versions[key]: maven_jar_versions[key].append(version) else: maven_jar_versions[key] = [version] key = group, artifact, version if key in maven_jars: maven_jars[key].append(jar) else: maven_jars[key] = [jar] maven_dep_ids = self._get_maven_dep_ids() jars = [] for (group, artifact), versions in iteritems(maven_jar_versions): if len(versions) == 1: picked_version = versions[0] else: picked_version = None for v in versions: maven_id = ':'.join((group, artifact, v)) if maven_id in maven_dep_ids: picked_version = v break if picked_version is None or LooseVersion( v) > LooseVersion(picked_version): picked_version = v console.debug( '%s: Maven dependency version conflict %s:%s:{%s} during %s. Use %s' % (self.fullname, group, artifact, ', '.join(versions), scope, picked_version)) jars += maven_jars[group, artifact, picked_version] return sorted(jars)
def _get_target_files(self):
    """Return all the files built by this target itself, as a sorted list."""
    collected = set()
    for _, produced in iteritems(self.data['targets']):
        # Each entry maps to either one file or a list of files.
        if isinstance(produced, list):
            collected.update(produced)
        else:
            collected.add(produced)
    return sorted(collected)
def _get_target_files(self):
    """Return all the files built by this target itself, as a sorted list."""
    self.get_rules()  # Ensure rules were generated
    collected = set()
    for _, produced in iteritems(self.__targets):
        # Each entry maps to either one file or a list of files.
        if isinstance(produced, list):
            collected.update(produced)
        else:
            collected.add(produced)
    return sorted(collected)
def _update_test_history(self):
    """Refresh the recorded test environment and compute its md5 digest."""
    previous_env = self.test_history.get('env', {})
    current_env = {key: os.environ[key] for key in _filter_envs(os.environ.keys())}
    if previous_env and current_env != previous_env:
        # An environment change may invalidate cached results, so tell the user.
        console.notice('Some tests will be run due to test environments changed:')
        added, removed = _diff_env(current_env, previous_env)
        if added:
            console.notice('New environments: %s' % added)
        if removed:
            console.notice('Old environments: %s' % removed)
    self.test_history['env'] = current_env
    self.env_md5 = md5sum(str(sorted(iteritems(current_env))))
def ninja_build(self, rule, outputs, inputs=None, implicit_deps=None, order_only_deps=None, variables=None, implicit_outputs=None, clean=None):
    """Emit one ninja build statement.

    Args:
        clean: list[str], files removed on clean; defaults to
            outputs + implicit_outputs. Pass an empty list to suppress
            cleaning (e.g. when a whole output directory is removed instead).
        Remaining arguments follow the ninja build-statement syntax.
    """
    outputs = var_to_list(outputs)
    implicit_outputs = var_to_list(implicit_outputs)
    out_tokens = list(outputs)
    if implicit_outputs:
        out_tokens.append('|')
        out_tokens.extend(implicit_outputs)
    in_tokens = var_to_list(inputs)
    if implicit_deps:
        in_tokens.append('|')
        in_tokens.extend(var_to_list(implicit_deps))
    if order_only_deps:
        in_tokens.append('||')
        in_tokens.extend(var_to_list(order_only_deps))
    self._write_rule('build %s: %s %s' % (' '.join(out_tokens), rule, ' '.join(in_tokens)))
    if clean is None:
        clean = outputs + implicit_outputs
    else:
        clean = var_to_list(clean)
    if clean:
        self._remove_on_clean(*clean)
    if variables:
        assert isinstance(variables, dict)
        for var_name, var_value in iteritems(variables):
            assert var_value is not None
            # An empty value still needs an explicit assignment line.
            if var_value:
                self._write_rule(' %s = %s' % (var_name, var_value))
            else:
                self._write_rule(' %s =' % var_name)
    self._write_rule('')  # Trailing blank line keeps the generated file readable
def _merge_failed_run_results_to_history(self, run_results):
    """Record failed runs, tracking first failure time and the failure count."""
    items = self.test_history['items']
    for key, result in iteritems(run_results):
        previous = items.get(key)
        if previous:
            first_fail = previous.first_fail_time or result.start_time
            count = 1 if previous.fail_count is None else previous.fail_count + 1
        else:
            first_fail = result.start_time
            count = 1
        if not previous or previous.result.exit_code == 0:
            # Unseen before or passing before: this is a newly failed test.
            self.new_failed_tests.append(key)
        items[key] = TestHistoryItem(
            self.test_jobs[key],
            first_fail_time=first_fail,
            fail_count=count,
            result=result)
def _get_changed_files(targets, blade_root_dir, working_dir):
    """Collect files reported modified ('M') or added ('A') by svn/git status.

    Args:
        targets: targets whose SCM roots are inspected.
        blade_root_dir: directory restored as cwd after each SCM query.
        working_dir: current working directory used to split targets by SCM root.

    Returns:
        set of absolute paths of changed files.
    """
    scm_root_dirs = split_targets_into_scm_root(targets, working_dir)
    changed_files = set()
    for scm_root, (scm, dirs) in iteritems(scm_root_dirs):
        try:
            os.chdir(scm_root)
            if scm == 'svn':
                output = os.popen('svn st %s' % ' '.join(dirs)).read().split('\n')
            elif scm == 'git':
                status_cmd = 'git status --porcelain %s' % ' '.join(dirs)
                output = os.popen(status_cmd).read().split('\n')
            else:
                # Unknown SCM kind: previously fell through and raised
                # NameError on the unbound `output`; skip it instead.
                continue
            for line in output:
                seg = line.strip().split()
                # Keep only modified or added entries; the path is the last field.
                if not seg or seg[0] != 'M' and seg[0] != 'A':
                    continue
                changed_files.add(os.path.join(scm_root, seg[-1]))
        finally:
            os.chdir(blade_root_dir)
    return changed_files
def _handle_protoc_plugins(self, plugins):
    """Resolve protoc plugins and register their generated-code dependencies."""
    plugin_config = config.get_section('protoc_plugin_config')
    resolved = []
    plugin_deps = set()
    java_plugin_deps = set()
    for plugin_name in plugins:
        if plugin_name not in plugin_config:
            self.error('Unknown plugin %s' % plugin_name)
            continue
        plugin = plugin_config[plugin_name]
        resolved.append(plugin)
        for language, generation in iteritems(plugin.code_generation):
            for dep_key in generation['deps']:
                if dep_key not in self.deps:
                    self.deps.append(dep_key)
                plugin_deps.add(dep_key)
                if language == 'java':
                    # Java generated code re-exports these deps to consumers.
                    java_plugin_deps.add(dep_key)
    self.attr['protoc_plugin_deps'] = list(plugin_deps)
    self.attr['exported_deps'] += list(java_plugin_deps)
    self.attr['protoc_plugins'] = resolved
def __init__(self, name, path, code_generation):
    """Build a protoc plugin descriptor, validating languages and normalizing deps."""
    self.name = name
    self.path = path
    assert isinstance(code_generation, dict)
    self.code_generation = {}
    for language, spec in iteritems(code_generation):
        if language not in self.__languages:
            console.fatal(
                '%s: Language %s is invalid. '
                'Protoc plugins in %s are supported by blade currently.' % (
                    name, language, ', '.join(self.__languages)))
        # Note that each plugin dep should be in the global target format
        # since protoc plugin is defined in the global scope
        normalized = []
        for dep in var_to_list(spec['deps']):
            if dep.startswith('//'):
                dep = dep[2:]
            dep_key = tuple(dep.split(':'))
            if dep_key not in normalized:
                normalized.append(dep_key)
        self.code_generation[language] = {'deps': normalized}
def __init__(self, name, srcs, deps, optimize, deprecated, generate_descriptors, plugins, source_encoding, blade, kwargs):
    """Initialize a proto_library target.

    Sets up protobuf runtime library deps for C++/Java/Python, resolves
    configured protoc plugins and their generated-code dependencies, and
    fills the per-language bookkeeping fields in self.data.
    """
    # pylint: disable=too-many-locals
    srcs = var_to_list(srcs)
    self._check_proto_srcs_name(srcs)
    CcTarget.__init__(self, name, 'proto_library', srcs, deps, None, '', [], [], [], optimize, [], [], blade, kwargs)
    if srcs:
        self.data['public_protos'] = [self._source_file_path(s) for s in srcs]
    proto_config = config.get_section('proto_library_config')
    protobuf_libs = var_to_list(proto_config['protobuf_libs'])
    protobuf_java_libs = var_to_list(proto_config['protobuf_java_libs'])
    protobuf_python_libs = var_to_list(proto_config['protobuf_python_libs'])
    # Hardcode deps rule to thirdparty protobuf lib.
    self._add_hardcode_library(protobuf_libs)
    self._add_hardcode_java_library(protobuf_java_libs)
    self._add_hardcode_library(protobuf_python_libs)
    plugins = var_to_list(plugins)
    self.data['protoc_plugins'] = plugins
    # Handle protoc plugin deps according to the language
    protoc_plugin_config = config.get_section('protoc_plugin_config')
    protoc_plugin_deps = set()
    protoc_plugin_java_deps = set()
    for plugin in plugins:
        if plugin not in protoc_plugin_config:
            console.error_exit('%s: Unknown plugin %s' % (self.fullname, plugin))
        p = protoc_plugin_config[plugin]
        for language, v in iteritems(p.code_generation):
            # Plugin deps are merged into both direct and expanded deps,
            # de-duplicated against what is already there.
            for key in v['deps']:
                if key not in self.deps:
                    self.deps.append(key)
                if key not in self.expanded_deps:
                    self.expanded_deps.append(key)
                protoc_plugin_deps.add(key)
                if language == 'java':
                    protoc_plugin_java_deps.add(key)
    self.data['protoc_plugin_deps'] = list(protoc_plugin_deps)
    # Normally a proto target depends on another proto target when
    # it references a message defined in that target. Then in the
    # generated code there is public API with return type/arguments
    # defined outside and in java it needs to export that dependency,
    # which is also the case for java protobuf library.
    self.data['exported_deps'] = self._unify_deps(var_to_list(deps))
    self.data['exported_deps'] += self._unify_deps(protobuf_java_libs)
    self.data['exported_deps'] += list(protoc_plugin_java_deps)
    # Link all the symbols by default
    self.data['link_all_symbols'] = True
    self.data['deprecated'] = deprecated
    self.data['source_encoding'] = source_encoding
    self.data['java_sources_explict_dependency'] = []
    self.data['python_vars'] = []
    self.data['python_sources'] = []
    self.data['generate_descriptors'] = generate_descriptors
def dump(self, output_file_name):
    """Write every config section to the named file."""
    with open(output_file_name, 'w') as out:
        for section_name, section_value in iteritems(self.configs):
            self._dump_section(section_name, section_value, out)
def _collect_slow_tests(self, run_results):
    """Return (cost_time, key) pairs for tests exceeding the slowness threshold."""
    threshold = self.options.show_tests_slower_than
    slow = []
    for key, result in iteritems(run_results):
        if result.cost_time > threshold:
            slow.append((result.cost_time, key))
    return slow
def _merge_run_results_to_history(self, run_results):
    """Store every run result into the test history, keyed by test."""
    items = self.test_history['items']
    for key, result in iteritems(run_results):
        items[key] = TestHistoryItem(self.test_jobs[key], result)