def _raise(output, tgt1, tgt2):
    """Raise a ValueError describing two targets that claim the same output.

    :param output: the duplicated output path.
    :param tgt1: first conflicting target dict.
    :param tgt2: second conflicting target dict.
    :raises ValueError: always.
    """
    first = pformat(get_minified_target(tgt1))
    second = pformat(get_minified_target(tgt2))
    message = "Duplicate output: {}\nCompare:\n{}\nVersus:\n{}".format(
        output, first, second
    )
    raise ValueError(message)
def register_target(self, target):
    """Register *target* with this context, resolving output-path collisions.

    :param target: target dict; must have a truthy 'output' key. Mutated in
        place (dependencies normalized, name/working_dir rewritten).
    :returns: list of targets actually registered by this call — the target
        itself plus any inline dependency dicts, or an empty list when the
        output was already registered and could not be replaced.
    """
    assert target["output"]
    registered_targets = []
    if "dependencies" not in target:
        target["dependencies"] = []
    existing_target = self.find_target(target["output"])
    if existing_target:
        if existing_target != target:
            # Fix: logger.warn is a deprecated alias — use logger.warning,
            # consistent with the call a few lines below.
            logger.warning(
                "Target's output is already registered, but with different definition"
            )
            existing_target_is_file = existing_target.get("type") == "file"
            new_target_is_not_file = target.get("type") != "file"
            if existing_target_is_file and new_target_is_not_file:
                # A richer target supersedes a plain file placeholder:
                # drop the old one and fall through to register the new.
                logger.warning("Replacing existing file target")
                del self.target_index[existing_target["output"]]
                self.targets.remove(existing_target)
            elif self._is_object_lib(existing_target) and self._is_object_lib(
                target
            ):
                # Two object libraries may share an output path; let the
                # helper disambiguate, then fall through to register.
                self._overwrite_target_output(existing_target, target)
            else:
                logger.error(
                    "Only targets with type=module and module_type=object_lib are allowed to have the same output path"
                )
                logger.info("Old target:")
                logger.info(pformat(get_minified_target(existing_target)))
                logger.info("New target:")
                logger.info(pformat(get_minified_target(target)))
                return registered_targets
        else:
            # Identical duplicate — nothing to register.
            return registered_targets
    if "name" in target:
        target["name"] = self._get_target_name(target)
    # Register any inline dependency dicts first (quietly).
    registered_targets.extend(self.update_target(target, log=False))
    working_dir = target.get("working_dir")
    if working_dir:
        target["working_dir"] = self.get_dir_arg(working_dir)
    logger.info(" > Registering new target:")
    logger.info(pformat(get_minified_target(target)))
    self.targets.append(target)
    self._add_target_to_index(target)
    registered_targets.append(target)
    return registered_targets
def _add_target_to_index(self, target):
    """Index *target* by its output path in ``self.target_index``.

    Any MSVC import-library outputs the target lists are indexed to the
    same target, so lookups by either path find it. Targets without an
    'output' key are only logged, never indexed.

    :param target: target dict to index.
    """
    if "output" in target:
        self.target_index[target["output"]] = target
        # `or []` guards against an explicit None value for the key.
        for output in target.get("msvc_import_lib") or []:
            self.target_index[output] = target
    else:
        # Fix: logger.warn is a deprecated alias of logger.warning.
        logger.warning("Target has no output:")
        logger.warning(pformat(get_minified_target(target)))
def _find_source_module_target(self, source):
    """Resolve *source* to the 'module' target that ultimately produces it.

    Follows 'module_copy' links transitively until a real module target
    is reached; asserts on any other target type.
    """
    entry = self.context.target_index.get(source)
    kind = entry["type"]
    if kind == "module":
        return entry
    if kind == "module_copy":
        # A copy points at another output path — recurse until a module.
        return self._find_source_module_target(entry["source"])
    # unexpected target type
    assert False, get_minified_target(entry)
def update_target(self, target, log=True):
    """Normalize a target's dependency list, registering inline targets.

    Any dependency given as a dict is registered via ``register_target``
    and replaced in place by its 'output' path string.

    :param target: target dict; its 'dependencies' entry is mutated.
    :param log: when True, log the updated target at INFO level.
    :returns: list of targets registered as a side effect (may be empty).
    """
    registered_targets = []
    # `or []` tolerates an explicit None dependency list.
    for idx, dep in enumerate(target["dependencies"] or []):
        # Fix: isinstance() is the idiomatic type check (was `type(dep) is dict`).
        if isinstance(dep, dict):
            registered_targets.extend(self.register_target(dep))
            target["dependencies"][idx] = dep["output"]
    if log:
        logger.info(" > Target updated:")
        logger.info(pformat(get_minified_target(target)))
    return registered_targets
def parse_targets(targets, context, parsers, log_type=None, project=None):
    """Run each target through the chain of parsers and register survivors.

    Each applicable parser may modify the target, or return a list of
    targets, in which case the current target is abandoned and the new
    targets are fed through the *remaining* parsers recursively.

    :param targets: iterable of target dicts.
    :param context: parsing context; receives current_target and
        register_target calls.
    :param parsers: ordered parser objects with a parse(target) method and
        an optional is_applicable(project=..., log_type=...) predicate.
    :param log_type: forwarded to parser applicability checks.
    :param project: forwarded to parser applicability checks.
    :returns: list of fully-parsed targets that have an 'output'.
    """
    result_targets = []
    for target in targets:
        logger.debug(" > Parsing target:")
        logger.debug(pformat(get_minified_target(target)))
        for idx, parser in enumerate(parsers):
            is_applicable = getattr(parser, "is_applicable", None)
            if is_applicable is None or is_applicable(
                project=project, log_type=log_type
            ):
                logger.debug(type(parser).__name__)
                try:
                    context.current_target = target
                    result = parser.parse(target)
                    if isinstance(
                        result, list
                    ):  # parser is allowed to return multiple targets
                        target = None
                        # Only the parsers after this one see the new targets.
                        result_targets += parse_targets(
                            result,
                            context,
                            parsers[idx + 1:],
                            log_type=log_type,
                            project=project,
                        )
                        break
                    else:
                        if target != result:
                            logger.debug(" > Modified target:")
                            logger.debug(pformat(get_minified_target(result)))
                        target = result
                except Exception:
                    # Best-effort: a failing parser must not abort the run.
                    # Fix: use the module logger (was the root `logging` logger,
                    # inconsistent with every other log call here).
                    logger.error(traceback.format_exc())
        if target and "output" in target:
            result_targets.append(target)
            context.register_target(target)
    return result_targets
def optimize(self, targets):
    """Merge mergeable object-library targets into their parent modules and
    short-circuit chains of 'module_copy' targets.

    :param targets: list of target dicts; entries are mutated in place.
    :returns: a new list of surviving targets. Object libraries that were
        merged into parents and module_copy targets whose source vanished
        are dropped from the result.
    :raises ValueError: when a top-level module_copy loses its source.
    """
    index = self._get_target_index(targets)
    optimized_targets = []
    # Outputs in the skipset are preserved as standalone targets rather
    # than merged/collapsed. NOTE(review): the exact criteria live in the
    # two helper methods — not visible from this block.
    skipset = self._get_module_copy_skipset(
        index, targets
    ) | self._get_object_lib_skipset(index, targets)
    for target in targets:
        if target["type"] == "module_copy":
            if target["output"] in skipset:
                # Re-point the copy directly at the final (non-copy) source.
                dep_target = get_final_module_copy_source(target, index)
                if target["source"] != dep_target["output"]:
                    target["dependencies"].remove(target["source"])
                    target["dependencies"].append(dep_target["output"])
                    target["source"] = dep_target["output"]
            optimized_targets.append(target)
            continue
        elif target["type"] != "module":
            # Non-module targets pass through untouched.
            optimized_targets.append(target)
            continue
        if target["module_type"] == "object_lib":
            if target["output"] in skipset:
                optimized_targets.append(target)
            # object libraries not in the skipset are merged into parent
            # targets below, so they are dropped here
            continue
        remaining_object_files = []
        for dep in target["objects"]:
            dep_target = index.get(dep)
            if (
                dep_target is None
                or dep_target["output"] in skipset
                or dep_target["type"] in ("file", "cmd")
            ):
                # Plain files, commands and skipped libs stay as objects.
                remaining_object_files.append(dep)
                continue
            target["dependencies"].remove(dep)
            dep_target = get_final_module_copy_source(dep_target, index)
            assert (
                dep_target["type"] == "module"
                and dep_target["module_type"] == "object_lib"
            ), get_minified_target(dep_target)
            # Source files from child object targets are going to be moved
            # to the current target.
            # If the current target already contains compile_flags or
            # include_dirs, they should be moved to source-file scope,
            # otherwise source files from child object targets would
            # inherit them and may compile with unnecessary extra flags.
            for s in target["sources"]:
                if s.get("language") is not None:
                    flags = copy(target["compile_flags"])
                    s["compile_flags"] = add_unique_stable(
                        flags, *s["compile_flags"]
                    )
                    dirs = copy(target["include_dirs"])
                    s["include_dirs"] = add_unique_stable(dirs, *s["include_dirs"])
            target["compile_flags"] = []
            target["include_dirs"] = []
            is_cmd_target = (
                lambda output: output in index and index[output].get("type") == "cmd"
            )
            cmd_deps = [
                d_ for d_ in dep_target["dependencies"] if is_cmd_target(d_)
            ]
            for d_ in cmd_deps:
                dep_target["dependencies"].remove(d_)
            for src in dep_target["sources"]:
                # Push the child's target-level flags/dirs down onto each
                # source so nothing is lost when it joins the parent.
                src["compile_flags"] = add_unique_stable(
                    copy(dep_target["compile_flags"]), *src["compile_flags"]
                )
                src["include_dirs"] = add_unique_stable(
                    copy(dep_target["include_dirs"]), *src["include_dirs"]
                )
                # Move type: 'cmd' dependencies from target to sources,
                # to allow more granular dependency control for later optimizers
                src["dependencies"] = add_unique_stable(
                    cmd_deps, *(src.get("dependencies") or [])
                )
            target["sources"].extend(deepcopy(dep_target["sources"]))
            # empty dirs and object output dirs won't be used,
            # leave only include dirs and dirs with aggregated files
            unused_dir_target = (
                lambda t: t is not None
                and t["type"] == "directory"
                and not t["output"] in dep_target["include_dirs"]
            )
            deps = [
                dep
                for dep in dep_target["dependencies"]
                if not unused_dir_target(index.get(dep))
            ]
            add_unique_stable(target["dependencies"], *deps)
        target["objects"] = remaining_object_files
        if "libs" in target:
            modified_libs = []
            for dep in target["libs"]:
                if isinstance(dep, dict):
                    # { 'gcc_whole_archive': True, 'value': '<output>'}
                    dep_output = dep["value"]
                else:
                    dep_output = dep
                dep_target = index.get(dep_output)
                if dep_target is None:
                    modified_libs.append(dep)
                    continue
                if dep_target["type"] == "module_copy":
                    # Link against the final copy source, not the copy.
                    dep_target = get_final_module_copy_source(dep_target, index)
                    if dep_output != dep_target["output"]:
                        target["dependencies"].remove(dep_output)
                        target["dependencies"].append(dep_target["output"])
                        if isinstance(dep, dict):
                            dep["value"] = dep_target["output"]
                        else:
                            dep = dep_target["output"]
                modified_libs.append(dep)
            target["libs"] = modified_libs
        optimized_targets.append(target)
    index = self._get_target_index(optimized_targets)
    # find module_copy targets without source and remove them
    pending_removal = []
    for t in optimized_targets:
        if t["type"] == "module_copy":
            if t["source"] not in index:
                if t.get("top_level"):
                    raise ValueError(
                        "Lost source for top-level module_copy target: {}. This is probably a bug.".format(
                            t["source"]
                        )
                    )
                pending_removal.append(t)
    for t in pending_removal:
        optimized_targets.remove(t)
    return optimized_targets