def _extract_nodes(digraph, extract_nodes): extract_nodes = set(extract_nodes) existing_nodes = set(digraph.keys()) invalid_nodes = extract_nodes - existing_nodes if len(invalid_nodes) > 0: logutil.warn('Trying to extract invalid nodes: %s' % ','.join(list(invalid_nodes))) extract_nodes = extract_nodes & existing_nodes etps = [] for ne in extract_nodes: parents = [] childern = digraph[ne] for nn in digraph: if ne in digraph[nn]: parents.append(nn) etp = ExtractedTriplet(ne, parents, childern) etps.append(etp) digraph_new = {} for nn in digraph: if nn not in extract_nodes: digraph_new[nn] = list(set(digraph[nn]) - extract_nodes) return digraph_new, etps
def parse_repo_layout_from_json(file_):
    """Parse the repo layout from a JSON file.

    Args:
        file_ (File): The source file.

    Returns:
        RepoLayout

    Raises:
        InvalidConfigFileError: The configuration file is invalid.
    """
    def ascii_encode_dict(data):
        # Re-encode every value list to ascii byte strings.
        new_data = {}
        for key, value in data.items():
            new_data[key] = [i.encode('ascii') for i in value]
        return new_data

    try:
        loaded_dict = json.load(file_, object_hook=ascii_encode_dict)
    except ValueError as e:
        # str(e) works on both Python 2 and 3; `e.message` raises
        # AttributeError on Python 3 and would mask the real error.
        raise blderror.InvalidConfigFileError(
            'Invalid .bdelayoutconfig: %s' % str(e))

    repo_layout = repolayout.RepoLayout()
    for key in loaded_dict:
        # Only accept keys that are known RepoLayout attributes.
        if key in repo_layout.__dict__:
            setattr(repo_layout, key, loaded_dict[key])
        else:
            logutil.warn('Invalid field in .bdelayoutconfig: %s.' % key)
    return repo_layout
def _verify_cycles_impl(self, digraph):
    """Check *digraph* for dependency cycles and record the outcome."""
    found = graphutil.find_cycles(digraph)
    if not found:
        logutil.end_msg('ok')
        return
    logutil.end_msg('found cycle(s)', color='RED')
    for cyc in found:
        logutil.warn('CYCLE: ' + ','.join(cyc))
    # Any cycle fails the overall verification.
    self.is_success = False
def _store_option_rule(self, rule, debug_keys=None):
    """Store the key and value of an option rule.

    Args:
        rule (OptionRule): The rule whose key/value to merge into
            ``self.results``.
        debug_keys (list, optional): Keys for which to log a debug trace.
    """
    # Avoid the mutable-default-argument pitfall.
    if debug_keys is None:
        debug_keys = []

    match = self._match_rule(rule)
    if rule.key in debug_keys:
        if match:
            logutil.info('Accept: %s' % rule)
        else:
            logutil.warn('Ignore: %s' % rule)  # fixed typo: was 'Ingore'
    if not match:
        return

    # Inline `shell` command: substitute the command's output into the
    # option value.
    mc = self._OPT_INLINE_COMMAND_RE.search(rule.value)
    if mc:
        v = rule.value
        out = sysutil.shell_command(mc.group(1)).rstrip()
        rule.value = '%s"%s"%s' % (v[:mc.start(1) - 3], out,
                                   v[mc.end(1) + 3:])

    mc2 = self._OPT_INLINE_COMMAND_RE2.match(rule.value)
    if mc2:
        out = sysutil.shell_command(mc2.group(1)).rstrip()
        rule.value = out

    key = rule.key
    value = rule.value

    if key == 'BDE_COMPILER_FLAG':
        global DEFAULT_COMPILER
        DEFAULT_COMPILER = value

    if key not in self.results:
        self.results[key] = value
    else:
        orig = self.results[key]
        # Merge according to the rule's command semantics.
        if rule.command == optiontypes.OptionCommand.ADD:
            self.results[key] = orig + ' ' + value if orig else value
        elif rule.command == optiontypes.OptionCommand.INSERT:
            self.results[key] = value + ' ' + orig if orig else value
        elif rule.command == optiontypes.OptionCommand.APPEND:
            self.results[key] = orig + value
        elif rule.command == optiontypes.OptionCommand.PREPEND:
            self.results[key] = value + orig
        elif rule.command == optiontypes.OptionCommand.OVERRIDE:
            self.results[key] = value

    if rule.key in debug_keys:
        logutil.info('Update: %s -> %s\n' % (rule.key,
                                             self.results[rule.key]))
def get_default_option_rules():
    """Return the default option rules.

    Returns:
        list of OptionRule.

    Raises:
        MissingFileError: If default.opts can not be found.
    """
    # NOTE: the previous docstring documented a `msg_func` argument that
    # this function does not take; it has been removed.
    default_opts_path = os.path.join(sysutil.repo_root_path(), 'etc',
                                     'default.opts')
    bde_root = os.environ.get('BDE_ROOT')

    found_default_opts = False
    found_default_internal_opts = False
    if not os.path.isfile(default_opts_path):
        logutil.warn('Cannot find default.opts at %s. '
                     'Trying to use $BDE_ROOT/etc/default.opts instead.' %
                     default_opts_path)
        # Fall back to $BDE_ROOT/etc/default.opts when available.
        if bde_root:
            default_opts_path = os.path.join(bde_root, 'etc', 'default.opts')
            if os.path.isfile(default_opts_path):
                found_default_opts = True
    else:
        found_default_opts = True

    if not found_default_opts:
        raise blderror.MissingFileError('Cannot find default.opts.')

    option_rules = optionsparser.parse_option_rules_file(default_opts_path)

    # Optionally layer internal rules from $BDE_ROOT on top.
    if bde_root:
        default_internal_opts_path = os.path.join(bde_root, 'etc',
                                                  'default_internal.opts')
        if os.path.isfile(default_internal_opts_path):
            found_default_internal_opts = True
            option_rules += optionsparser.parse_option_rules_file(
                default_internal_opts_path)
        else:
            logutil.warn('The BDE_ROOT environment variable is set, '
                         'but $BDE_ROOT/etc/default_internal.opts does '
                         'not exist.')

    logutil.msg("Using default option rules from", default_opts_path)
    if found_default_internal_opts:
        logutil.msg("Using default option rules from",
                    default_internal_opts_path)
    return option_rules
def get_default_option_rules():
    """Return the default option rules.

    Returns:
        list of OptionRule.

    Raises:
        MissingFileError: If default.opts can not be found.
    """
    default_opts_path = os.path.join(sysutil.repo_root_path(), "etc",
                                     "default.opts")
    bde_root = os.environ.get("BDE_ROOT")

    # Locate default.opts, falling back to $BDE_ROOT/etc/default.opts.
    have_defaults = os.path.isfile(default_opts_path)
    if not have_defaults:
        logutil.warn("Cannot find default.opts at %s. "
                     "Trying to use $BDE_ROOT/etc/default.opts instead." %
                     default_opts_path)
        if bde_root:
            default_opts_path = os.path.join(bde_root, "etc", "default.opts")
            have_defaults = os.path.isfile(default_opts_path)

    if not have_defaults:
        raise blderror.MissingFileError("Cannot find default.opts.")

    rules = optionsparser.parse_option_rules_file(default_opts_path)

    # Layer internal rules from $BDE_ROOT on top when present.
    have_internal = False
    if bde_root:
        default_internal_opts_path = os.path.join(bde_root, "etc",
                                                  "default_internal.opts")
        if os.path.isfile(default_internal_opts_path):
            have_internal = True
            rules += optionsparser.parse_option_rules_file(
                default_internal_opts_path)
        else:
            logutil.warn('The BDE_ROOT environment variable is set to "%s", '
                         'but $BDE_ROOT/etc/default_internal.opts ("%s") does '
                         "not exist." % (bde_root, default_internal_opts_path))

    logutil.msg("Using default option rules from", default_opts_path)
    if have_internal:
        logutil.msg("Using default option rules from",
                    default_internal_opts_path)
    return rules
def evaluate_key(key):
    """Resolve *key* to its option value, expanding $(NAME) references.

    Lookup order: cached results, raw options (with recursive
    expansion), then the process environment.  Unknown keys yield ''.
    """
    # Already evaluated — return the cached value.
    if key in self.results:
        return self.results[key]

    if key in self.options:
        # Recursively expand every $(NAME) reference in the raw value.
        expanded = re.sub(r'(\$\((\w+)\))',
                          lambda m: evaluate_key(m.group(2)),
                          self.options[key])
        self.results[key] = expanded
        return self.results[key]

    if key in os.environ:
        logutil.warn(
            'Using the environment variable "%s" as an option key', key)
        self.results[key] = os.environ[key]
        return self.results[key]

    return ''
def evaluate_key(key):
    """Return the evaluated option value for *key* (memoized)."""
    try:
        # Fast path: value was evaluated earlier.
        return self.results[key]
    except (KeyError, TypeError):
        pass

    if key in self.options:
        raw = self.options[key]
        # Expand nested $(NAME) references recursively.
        value = re.sub(
            r'(\$\((\w+)\))',
            lambda m: evaluate_key(m.group(2)),
            raw)
        self.results[key] = value
        return self.results[key]

    if key in os.environ:
        logutil.warn(
            'Using the environment variable "%s" as an option key', key)
        self.results[key] = os.environ[key]
        return self.results[key]

    return ''
def parse_repo_layout_from_json(file_):
    """Parse the repo layout from a JSON file.

    Args:
        file_ (File): The source file.

    Returns:
        RepoLayout

    Raises:
        InvalidConfigFileError: The configuration file is invalid.
    """
    def encode_dict(data):
        new_data = {}
        for key, value in data.items():
            # Waf Node API requires String objects
            if not isinstance(key, str):
                new_data[key.encode('utf-8')] = [i.encode('utf-8')
                                                 for i in value]
            else:
                new_data[key] = value
        return new_data

    try:
        loaded_dict = json.load(file_, object_hook=encode_dict)
    except ValueError as e:
        # str(e) works on both Python 2 and 3; `e.message` raises
        # AttributeError on Python 3 and would mask the real error.
        raise blderror.InvalidConfigFileError(
            'Invalid .bdelayoutconfig: %s' % str(e))

    repo_layout = repolayout.RepoLayout()
    for key in loaded_dict:
        # Only accept keys that are known RepoLayout attributes.
        if key in repo_layout.__dict__:
            setattr(repo_layout, key, loaded_dict[key])
        else:
            logutil.warn('Invalid field in .bdelayoutconfig: %s.' % key)
    return repo_layout
def load_normal_package(package_name, oe):
    """Evaluate options for one inner package and register its config.

    Returns:
        bool: True when the package was registered; False when its
        CAPABILITY evaluates to 'NEVER' and it was skipped.

    NOTE(review): closure — reads `repo_context`, `debug_keys`,
    `build_flags_parser`, and `build_config` from the enclosing scope.
    """
    package = repo_context.units[package_name]

    pkg_oe = copy.deepcopy(oe)
    pkg_oe.store_option_rules(package.opts)
    pkg_oe.store_option_rules(package.cap)
    set_unit_loc(pkg_oe, package)
    if debug_keys:
        logutil.info('--Evaluating %s' % package_name)
    pkg_oe.evaluate(debug_keys)

    if pkg_oe.results.get('CAPABILITY') == 'NEVER':
        logutil.warn('Skipped non-supported package %s' % package_name)
        return False

    is_plus = package.type_ == repounits.PackageType.PACKAGE_PLUS
    package_bc = (buildconfig.PlusPackageBuildConfig() if is_plus
                  else buildconfig.InnerPackageBuildConfig())

    package_bc.name = package.name
    package_bc.path = package.path
    package_bc.dep = package.dep
    package_bc.type_ = package.type_
    package_bc.flags = get_build_flags_from_opts(build_flags_parser,
                                                 pkg_oe.results)
    package_bc.has_dums = package.has_dums

    if is_plus:
        extras = package.pt_extras
        package_bc.headers = extras.headers
        package_bc.cpp_sources = extras.cpp_sources
        package_bc.cpp_tests = extras.cpp_tests
        package_bc.c_tests = extras.c_tests
    else:
        package_bc.components = package.components

    build_config.inner_packages[package_name] = package_bc
    return True
def _store_option_rule(self, rule, debug_keys=None):
    """Store the key and value of an option rule.

    Args:
        rule (OptionRule): The rule whose key/value to merge into
            ``self.options``.
        debug_keys (list, optional): Keys for which to log a debug trace.
    """
    # Avoid the mutable-default-argument pitfall.
    if debug_keys is None:
        debug_keys = []

    match = self._match_rule(rule)
    if rule.key in debug_keys:
        if match:
            logutil.info('Accept: %s' % rule)
        else:
            logutil.warn('Ignore: %s' % rule)
    if not match:
        return

    # `subst` was a hack to remove a flag from the list of compiler flags
    # when building test drivers.  This is no longer needed and will be
    # removed from opts files in BDE.  It is explicitly ignored here for
    # backward compatibility.
    if self._OPT_INLINE_SUBST_RE.match(rule.value):
        if rule.key in debug_keys:
            logutil.warn('Skipping rule: %s' % rule)
        return

    # `shell` returns output of a terminal command.  It is used as part
    # of a hack to build bde-bb.
    mc = self._OPT_INLINE_COMMAND_RE.search(rule.value)
    if mc:
        v = rule.value
        out = sysutil.shell_command(mc.group(1)).rstrip()
        rule.value = '%s"%s"%s' % (v[:mc.start(1) - 3], out,
                                   v[mc.end(1) + 3:])

    mc2 = self._OPT_INLINE_COMMAND_RE2.match(rule.value)
    if mc2:
        out = sysutil.shell_command(mc2.group(1)).rstrip()
        rule.value = out

    key = rule.key
    value = rule.value

    if key not in self.options:
        self.options[key] = value
    else:
        orig = self.options[key]
        # Merge according to the rule's command semantics.
        if rule.command == optiontypes.OptionCommand.ADD:
            self.options[key] = orig + ' ' + value if orig else value
        elif rule.command == optiontypes.OptionCommand.INSERT:
            self.options[key] = value + ' ' + orig if orig else value
        elif rule.command == optiontypes.OptionCommand.APPEND:
            self.options[key] = orig + value
        elif rule.command == optiontypes.OptionCommand.PREPEND:
            self.options[key] = value + orig
        elif rule.command == optiontypes.OptionCommand.OVERRIDE:
            self.options[key] = value

    if rule.key in debug_keys:
        logutil.info('Update: %s -> %s\n' % (rule.key,
                                             self.options[rule.key]))
def load_uor(uor):
    """Build and register the build configuration for one UOR.

    NOTE(review): this is a closure — it reads `uor_dep_graph`, `def_oe`,
    `uor_map`, `build_config`, `debug_keys`, and `build_flags_parser`
    from the enclosing scope; confirm against the enclosing function.
    Returns early (registering nothing) when the UOR's CAPABILITY
    evaluates to 'NEVER'.
    """
    # Preserve the existing behavior of loading defs, opts and cap files as
    # bde_build:
    #
    # - Exported options of an UOR: read the defs files of its dependencies
    #   followed by itself.  The files of the dependencies should be read
    #   in topological order; if the order of certain dependencies is
    #   ambiguous, order them first by dependency levels, and then by
    #   their name.
    #
    # - Internal options of an UOR: read the defs files in the same way,
    #   followed by its own opts file.
    dep_levels = graphutil.levelize(uor_dep_graph, uor_dep_graph[uor.name])
    oe = copy.deepcopy(def_oe)
    # We load options in levelized order instead of any topological order
    # to preserve the behavior with bde_build (older version of the build
    # tool).  Note that we cannot cache intermediate results because later
    # option rules may change the results from the previous rule.
    for level in dep_levels:
        for dep_name in sorted(level):
            if dep_name not in build_config.external_dep and \
               dep_name not in build_config.third_party_dirs:
                dep_uor = uor_map[dep_name]
                oe.store_option_rules(dep_uor.cap)
                oe.store_option_rules(dep_uor.defs)
    if (build_config.uplid.os_type == 'windows' and
            build_config.uplid.comp_type == 'cl'):
        # By default, Visual Studio uses a single pdb file for all object
        # files compiled from a particular directory named
        # vc<vs_version>.pdb.  We want to use a separate pdb file for each
        # package group and stand-alone package.
        #
        # BDE_CXXFLAGS and BDE_CFLAGS are defined by default.opts, so the
        # code below is a bit hackish.
        pdb_option = ' /Fd%s\\%s.pdb' % (
            os.path.relpath(uor.path, build_config.root_path), uor.name)
        oe.results['BDE_CXXFLAGS'] += pdb_option
        oe.results['BDE_CFLAGS'] += pdb_option
    # Choose the build-config type from the unit type.
    if uor.type_ == repounits.UnitType.GROUP:
        uor_bc = buildconfig.PackageGroupBuildConfig()
    elif uor.type_ in repounits.UnitTypeCategory.PACKAGE_STAND_ALONE_CAT:
        uor_bc = buildconfig.StdalonePackageBuildConfig()
    else:
        assert(False)
    uor_bc.name = uor.name
    uor_bc.path = uor.path
    uor_bc.doc = uor.doc
    uor_bc.version = uor.version
    uor_bc.dep = uor.dep - build_config.external_dep
    uor_bc.external_dep = uor.dep & build_config.external_dep
    # Store options from dependencies, options for exports, and internal
    # options separately.
    dep_oe = copy.deepcopy(oe)
    dep_oe.evaluate()
    oe.store_option_rules(uor.cap)
    oe.store_option_rules(uor.defs)
    set_unit_loc(oe, uor)
    export_oe = copy.deepcopy(oe)
    int_oe = copy.deepcopy(oe)
    export_oe.evaluate()
    if export_oe.results.get('CAPABILITY') == 'NEVER':
        logutil.warn('Skipped non-supported UOR %s' % uor.name)
        return
    int_oe.store_option_rules(uor.opts)
    # Copy unevaluated internal options to be used by packages within
    # package groups.
    int_oe_copy = copy.deepcopy(int_oe)
    if debug_keys:
        logutil.info('--Evaluating %s' % uor.name)
    int_oe.evaluate(debug_keys)
    # Remove export flags of an uor's dependencies from its own export
    # flags.  This implementation is not very optimal, but it gets the
    # job done.
    dep_flags = get_build_flags_from_opts(build_flags_parser,
                                          dep_oe.results,
                                          dep_oe.results)
    uor_bc.flags = get_build_flags_from_opts(
        build_flags_parser, int_oe.results, export_oe.results,
        dep_flags.export_flags, dep_flags.export_libs)
    # Dispatch to the group or stand-alone package loader.
    if uor.type_ == repounits.UnitType.GROUP:
        load_package_group(uor, uor_bc, int_oe_copy)
    elif uor.type_ in repounits.UnitTypeCategory.PACKAGE_STAND_ALONE_CAT:
        load_sa_package(uor, uor_bc)
    else:
        assert(False)
def log_warn(line, msg):
    """Report *msg* as a warning prefixed with the file path and line."""
    text = "%s %d: %s" % (file_path, line, msg)
    logutil.warn(text)
def load_uor(uor):
    """Build and register the build configuration for one UOR.

    NOTE(review): this is a closure — it reads `uor_dep_graph`, `def_oe`,
    `uor_map`, `build_config`, `debug_keys`, and `build_flags_parser`
    from the enclosing scope; confirm against the enclosing function.
    Returns early (registering nothing) when the UOR's CAPABILITY
    evaluates to 'NEVER'.
    """
    # Preserve the existing behavior of loading defs, opts and cap files as
    # bde_build:
    #
    # - Exported options of an UOR: read the defs files of its dependencies
    #   followed by itself.  The files of the dependencies should be read
    #   in topological order; if the order of certain dependencies is
    #   ambiguous, order them first by dependency levels, and then by
    #   their name.
    #
    # - Internal options of an UOR: read the defs files in the same way,
    #   followed by its own opts file.
    dep_levels = graphutil.levelize(uor_dep_graph, uor_dep_graph[uor.name])
    oe = copy.deepcopy(def_oe)
    # We load options in levelized order instead of any topological order
    # to preserve the behavior with bde_build (older version of the build
    # tool).  Note that we cannot cache intermediate results because later
    # option rules may change the results from the previous rule.
    for level in dep_levels:
        for dep_name in sorted(level):
            if dep_name not in build_config.external_dep and \
               dep_name not in build_config.third_party_dirs:
                dep_uor = uor_map[dep_name]
                oe.store_option_rules(dep_uor.cap)
                oe.store_option_rules(dep_uor.defs)
    if (build_config.uplid.os_type == 'windows' and
            build_config.uplid.comp_type == 'cl'):
        # By default, Visual Studio uses a single pdb file for all object
        # files compiled from a particular directory named
        # vc<vs_version>.pdb.  We want to use a separate pdb file for each
        # package group and stand-alone package.
        #
        # BDE_CXXFLAGS and BDE_CFLAGS are defined by default.opts, so the
        # code below is a bit hackish.
        pdb_option = ' /Fd%s\\%s.pdb' % (os.path.relpath(
            uor.path, build_config.root_path), uor.name)
        oe.results['BDE_CXXFLAGS'] += pdb_option
        oe.results['BDE_CFLAGS'] += pdb_option
    # Choose the build-config type from the unit type.
    if uor.type_ == repounits.UnitType.GROUP:
        uor_bc = buildconfig.PackageGroupBuildConfig()
    elif uor.type_ in repounits.UnitTypeCategory.PACKAGE_STAND_ALONE_CAT:
        uor_bc = buildconfig.StdalonePackageBuildConfig()
    else:
        assert (False)
    uor_bc.name = uor.name
    uor_bc.path = uor.path
    uor_bc.doc = uor.doc
    uor_bc.version = uor.version
    uor_bc.dep = uor.dep - build_config.external_dep
    uor_bc.external_dep = uor.dep & build_config.external_dep
    # Store options from dependencies, options for exports, and internal
    # options separately.
    dep_oe = copy.deepcopy(oe)
    dep_oe.evaluate()
    oe.store_option_rules(uor.cap)
    oe.store_option_rules(uor.defs)
    set_unit_loc(oe, uor)
    export_oe = copy.deepcopy(oe)
    int_oe = copy.deepcopy(oe)
    export_oe.evaluate()
    if export_oe.results.get('CAPABILITY') == 'NEVER':
        logutil.warn('Skipped non-supported UOR %s' % uor.name)
        return
    int_oe.store_option_rules(uor.opts)
    # Copy unevaluated internal options to be used by packages within
    # package groups.
    int_oe_copy = copy.deepcopy(int_oe)
    if debug_keys:
        logutil.info('--Evaluating %s' % uor.name)
    int_oe.evaluate(debug_keys)
    # Remove export flags of an uor's dependencies from its own export
    # flags.  This implementation is not very optimal, but it gets the
    # job done.
    dep_flags = get_build_flags_from_opts(build_flags_parser,
                                          dep_oe.results,
                                          dep_oe.results)
    uor_bc.flags = get_build_flags_from_opts(build_flags_parser,
                                             int_oe.results,
                                             export_oe.results,
                                             dep_flags.export_flags,
                                             dep_flags.export_libs)
    # Dispatch to the group or stand-alone package loader.
    if uor.type_ == repounits.UnitType.GROUP:
        load_package_group(uor, uor_bc, int_oe_copy)
    elif uor.type_ in repounits.UnitTypeCategory.PACKAGE_STAND_ALONE_CAT:
        load_sa_package(uor, uor_bc)
    else:
        assert (False)