def uses(self, app):
    ## Determine which configs under `app` this toolchain will act on.
    ##
    ## :param app: AppToolConfig node; itself and its children are checked.
    ## :returns: list of configs (app and/or its children) whose used() is True.
    ##
    ## NOTE(review): this bare self.name() call has no visible effect here —
    ## possibly a leftover, or a garbled argument to the debug call below; confirm.
    self.name()
    PyLog.debug("Determining usage", app.path(DF.source | DF.src), log_level=5)
    if not hasattr(self, "defaults"):
        self.defaults = {}
    ## NOTE(review): src_exts is only validated when `defaults` already exists
    ## (elif); if that coupling is unintended this should be an independent `if`.
    elif not isinstance(self.src_exts, list):
        raise ToolChainException(
            "%s MUST define a list of src_exts during __init__!" % self.__class__)
    src_configs = []
    ## Inspect the app itself plus each of its children.
    for child in [app] + app.children:
        PyLog.debug("Looking in", child.path(DF.source | DF.src), log_level=6)
        if child.used():
            src_configs.append(child)
    ## Return the list of apps used
    return src_configs
def path(self, options=0, dirs=None, subdirs=None, file_name=None, lang=None,
         relative_to=None, dep_name=None, dep_dir=None):
    ## Build a directory (or file) path for this app from the DF option flags.
    ##
    ## :param options: bitmask of DF.* flags selecting prefix/dir semantics.
    ## :param dirs: extra leading directory components (defaults to []).
    ## :param subdirs: extra trailing directory components (defaults to []).
    ## :param file_name: optional file name appended to the path.
    ## :param lang: language-specific subdirectory selector (e.g. "python").
    ## :param relative_to: base the generated path relative to this directory.
    ## :param dep_name/dep_dir: dependency naming passed through to getDirName.
    ## :returns: the assembled path string from makeDir.
    ##
    ## FIX: the defaults were shared mutable lists (dirs=[], subdirs=[]);
    ## use None sentinels so each call gets a fresh list.
    dirs = [] if dirs is None else dirs
    subdirs = [] if subdirs is None else subdirs
    ## Check for validity
    prefix = self.getPrefix(options)
    dir_name = self.getDirName(options, lang, dep_name, dep_dir)
    d = self.makeDir(prefix, dirs, dir_name, subdirs, file_name, options, relative_to)
    PyLog.debug("Generated dir", d, log_level=8,
                options=options, dirs=dirs, subdirs=subdirs, file_name=file_name,
                lang=lang, dep_name=dep_name, dep_dir=dep_dir)
    return d
def runAction(self, app, action, function):
    ## Run `function` for `app` and each of its children, collecting the
    ## commands they return and executing them in order.
    ##
    ## :param app: AppToolConfig to act on.
    ## :param action: name of the action (used only for error reporting).
    ## :param function: callable(child, app) -> list of commands; each command
    ##                  is either a callable taken with (app) or a shell string
    ##                  executed via PySH.cmd.
    ## :returns: True (skips work entirely when file stats say up to date).
    ## :raises: ToolChainException on a failing shell command, an invalid
    ##          command type, or any unexpected Exception (wrapped with trace).
    PyLog.log("%s(%s)" % (self.name(), app.name()))
    PyLog.debug("%s(%s)[%s]" % (self.name(), app.name(), app._dir), log_level=6)
    PyLog.increaseIndent()
    file_stats = {}
    try:
        ## Check if we're up to date
        file_stats = app.fileStats()
        updated = FD.FileStats.updatedFiles(file_stats)
        if len(updated) == 0:
            PyLog.log("Up to date", log_level=-1)
            return True
        else:
            PyLog.log("OUT of date", count=len(updated), log_level=-1)
        #PyLog.decreaseIndent()
        cmds = []
        ## If no children but we're being built
        ## We must just be a shallow app.
        for child in [app] + app.children:
            t_cmds = function(child, app)
            PyLog.debug("Returned", t_cmds, log_level=6)
            if t_cmds:
                cmds += t_cmds
        if not cmds:
            cmds = []
        for cmd in cmds:
            ## Commands may be callables or shell strings; anything else is fatal.
            if hasattr(cmd, "__call__"):
                cmd(app)
            elif isinstance(cmd, basestring):
                if PySH.cmd(cmd, stdout=True, stderr=True) != 0:
                    raise ToolChainException(
                        "Failure while performing action",
                        action=action, app=app, cmd=cmd)
            else:
                raise ToolChainException(
                    "Invalid %s cmd. Cmds must be string or fun: %s : %s" % (action, app, cmd))
    except PyExcept.BaseException as e:
        ## Project exceptions pass straight through.
        raise e
    except Exception as e:
        ## Anything unexpected is wrapped with the current traceback.
        raise ToolChainException(None, trace=sys.exc_info())
    finally:
        PyLog.decreaseIndent()
        ## Persist whatever stats we gathered, even on failure.
        FD.FileStats.saveFileStats(file_stats)
    ## NOTE(review): this is a second decreaseIndent for a single increaseIndent
    ## on the success path (the finally already decreased once) — confirm the
    ## intended indent bookkeeping.
    PyLog.decreaseIndent()
    return True
def buildLibrary(self, child):
    ## Stage the app's python sources from its src dir into the build tree.
    src = child.path(DF.source | DF.src)
    dst = child.path(DF.build | DF.langlib | DF.app, lang="python")
    PyLog.debug("BUILD Copy %s -> %s" % (src, dst), log_level=6)
    PySH.copy(src, dst, pattern="*.py", force=True, root_only=False)
def installLibrary(self, child):
    ## Copy the app's built python library files into the install tree.
    ##
    ## :param child: AppToolConfig whose build output is installed.
    build_dir = child.path(DF.build | DF.langlib | DF.app, lang="python")
    install_dir = child.path(DF.install | DF.langlib | DF.app, lang="python")
    ## FIX: log_level was -6, inconsistent with the sibling buildLibrary
    ## (log_level=6) — almost certainly a sign typo.
    PyLog.debug("INSTALL Copy " + build_dir + " -> " + install_dir, log_level=6)
    PySH.copy(build_dir, install_dir, pattern="*.py", force=True, root_only=False)
def findAppDirs(self, parent=None, root=".", app_dirs=None):
    ## Recursively collect (parent_dir, dir) pairs for every app directory
    ## under `root`, descending into src/ when present.
    ##
    ## :param parent: directory considered the parent of `root` (None at top).
    ## :param root: directory to scan.
    ## :param app_dirs: accumulator list; defaults to a fresh list.
    ## :returns: the accumulator, one (parent, dir) tuple per discovered dir.
    ##
    ## FIX: app_dirs=[] was a mutable default that this function mutates
    ## (append), so results leaked between calls; use a None sentinel.
    if app_dirs is None:
        app_dirs = []
    app_dirs.append((parent, root))
    src_dir = os.path.join(root, "src")
    PyLog.debug("Finding app dirs in ", src_dir, log_level=6)
    ## If there is a src/ dir, this dir is the parent of whatever is inside it;
    ## otherwise recurse over the dir's own subdirectories keeping our parent.
    nparent = root
    dirs = []
    if os.path.isdir(src_dir):
        dirs = PyFind.getDirDirs(src_dir)
    else:
        nparent = parent
        dirs = PyFind.getDirDirs(root)
    PyLog.debug("Found app dirs", dirs, log_level=8)
    for d in dirs:
        ## Skip hidden directories (".git", ".svn", ...).
        if os.path.basename(d)[0] != ".":
            self.findAppDirs(nparent, d, app_dirs)
    return app_dirs
def __init__(self, tool, dir, src_exts, include_exts, include_pattern, *args):
    ## Build the per-app, per-tool configuration node.
    ##
    ## :param tool: name of the toolchain this config belongs to.
    ## :param dir: root directory of the app; becomes _dir and initial src_root.
    ## :param src_exts: list of source-file extensions the toolchain handles
    ##                  (must not be None; enforced below).
    ## :param include_exts: include/header file extensions, or None.
    ## :param include_pattern: glob pattern for include files, or None.
    ## :param args: config objects (env, tool, app) merged via self.parse().
    ## :raises FishmongerConfigException: when src_exts is None.
    PyLog.debug("AppToolConfig created for", dir, *args, log_level=6)
    self.tool = tool
    ## Type starts as project; configure() refines it to app/subdir later.
    self.type = AppToolConfig.Types.project
    self.dependency = False
    self._dir = dir
    self.src_root = dir
    self.package = False
    self.parent = None
    self.children = []
    self.config = {}
    self.defaults = {
        "BUILD_AFTER_TOOLS" : set(),  ## ToolChains that must be run first
        "BUILD_AFTER_APPS"  : set(),  ## Apps that must be built first
        "BUILD_AFTER"       : set(),  ## Specific Tool/App pairs that must be run first
        "DEPENDENCIES"      : set(),  ## Apps in separate repositories
        "INCLUDE_DIRS"      : set(),  ## Directories that include files reside in
        "LIB_DIRS"          : set(),  ## Directories prebuilt libraries exist in
        "DOC_DIR"           : "doc",      ## Directory to install documentation into
        "BUILD_PREFIX"      : "build",    ## Directory to place built files into
        "DEP_DIR"           : "deps",     ## Directory to checkout code into
        "INSTALL_PREFIX"    : "install",  ## Directory to install into
        "TOOL_OPTIONS"      : {},
        "TOOL_CLI_OPTIONS"  : set(),
        "APP_OPTIONS"       : {},
        ## MISC
        "SKIP_UPDATE"       : False
    }
    ## Keys that may not be overridden by config files.
    self.constants = ["INSTALL_PREFIX"]
    ## Parsers/coercions applied to raw config values.
    self.types = {
        "INCLUDE_DIRS" : PyConfig.parseDirs,
        "LIB_DIRS"     : PyConfig.parseDirs,
        "SKIP_UPDATE"  : bool
    }
    ## All default keys are permitted config keys.
    self.allowed = {x : True for x in self.defaults}
    self.set_behavior = PyConfig.ConfigSetBehavior.merge
    self.src_exts = src_exts
    ## NOTE(review): `== None` should idiomatically be `is None`.
    if src_exts == None:
        raise FishmongerConfigException("Toolchains MUST have src_exts defined", tool=self.tool, app=self.name())
    self.include_exts = include_exts
    self.include_pattern = include_pattern
    ## Merge the supplied config layers into this node.
    self.parse(*args)
def runAction(self, max_cores=1):
    ## Dispatch self.command_list as parallel BuildTask processes while
    ## honoring self.command_dependencies, using at most max_cores tasks
    ## at once. Exits the process (sys.exit(1)) on a dependency deadlock
    ## or when a task reports failure.
    ##
    ## :param max_cores: maximum number of concurrently running tasks.
    manager = multiprocessing.Manager()
    ## Completed tasks report (key, result) on this queue.
    clean_queue = manager.Queue()
    used_cores = 0
    tasks = {}                 # key -> BuildTask (None once joined)
    commands = self.command_list
    dependencies = self.command_dependencies   # key -> set of prerequisite keys
    command = None             # command currently awaiting dispatch
    result = True
    clean_key = None           # key of the most recently reaped task
    dependency_block = False   # True when no runnable command exists yet
    while True:
        ## We continue to process as long as we haven't failed
        ## And as long as we have a command left to spawn
        if result != True:
            PyLog.debug("Result false", log_level=6)
            break
        elif len(commands) == 0 and command == None:
            PyLog.debug("Comamnd Stats", commands=commands, command=command, log_level=6)
            break
        ## If we have no command extract one
        if command == None:
            dependency_block = False
            ## Get the first command that has no dependencies outstanding
            for x in range(len(commands)):
                t_key = commands[x][0]
                if len(dependencies[t_key]) == 0:
                    command = commands.pop(x)
                    break
            PyLog.debug("Fetched Command", command=command, log_level=6)
            if command == None:
                dependency_block = True
        ## If we have a command AND we have available cores build/dispatch the task
        if used_cores < max_cores and command != None:
            PyLog.debug("Have cores", used_cores=used_cores, max_cores=max_cores, log_level=9)
            (key, action, app) = command
            PyLog.debug("Running Task", task=key, log_level=9)
            t_task = BuildTask(key, action, app, clean_queue)
            tasks[key] = t_task
            PyLog.debug("Starting task", log_level=9)
            t_task.start()
            used_cores += 1
            command = None
            continue
        else:
            PyLog.debug("Waiting on cores or command dependencies", used_cores=used_cores, max_cores=max_cores, command=command, log_level=6)
        ## If we ever get to the point where we could not get a command AND no tasks are running
        ## There must be some error in the build dependencies
        ## Halt the system in error as we'll never take another action
        if used_cores == 0 and command == None and len(commands) != 0:
            PyLog.error("No commands can be built and no tasks are pending. Halting.", commands=commands, dependencies=dependencies)
            sys.exit(1)
        ## If we have no cores OR
        ## If we have remaining commands OR
        ## If we are dependency blocked
        if used_cores == max_cores or len(commands) == 0 or dependency_block == True:
            PyLog.debug("Waiting for a task to finish", cores=used_cores==max_cores, commands=len(commands), dependency_block=dependency_block, log_level=6)
            ## Block until some task finishes, then reap it.
            (clean_key, result) = clean_queue.get()
            PyLog.debug("Cleaning", clean_key=clean_key, result=result, log_level=6)
            tasks[clean_key].join()
            tasks[clean_key] = None
            used_cores -= 1
            ## The finished task is no longer an outstanding dependency.
            for d in dependencies:
                dependencies[d] -= set([clean_key])
    ## Reap any tasks still alive; on failure, terminate them first.
    for t in tasks:
        if tasks[t] is not None:
            PyLog.debug("Joining", task=t, log_level=6)
            if result != True:
                tasks[t].terminate()
            tasks[t].join()
            tasks[t] = None
    if not result:
        PyLog.error("Command returned unsuccessful result. Halting.", task=clean_key, result=result)
        sys.exit(1)
def configureAction(self, tool_chains, action):
    ## Decide which (tool, app) pairs run `action`, build the dependency
    ## DAG between them, and store the topologically ordered command list in
    ## self.command_list / self.command_dependencies for runAction().
    ##
    ## :param tool_chains: (external_tool_modules, internal_tool_modules).
    ## :param action: method name looked up on each ToolChain instance.
    (external_tools, internal_tools) = tool_chains
    tool_chains = external_tools | internal_tools
    ## [Module] -> [ToolChain]
    tool_chains = [t.ToolChain() for t in tool_chains]
    ## [ToolChain] -> {String:ToolChain}
    tool_chains = {t.name() : t for t in tool_chains}
    external_tools = [t.ToolChain().name() for t in external_tools]
    internal_tools = [t.ToolChain().name() for t in internal_tools]
    ## External tools are considered before internal ones.
    tool_order = external_tools + internal_tools
    allconfig = self.allconfig
    children = copy.copy(self.children)
    usedconfig = {}
    ## child dir -> external tool that already claimed it (externals are exclusive).
    external_exclusions = {}
    ## Figure out which nodes are used
    for tool in tool_order:
        for child in children:
            apptool = allconfig[tool][child]
            if apptool.type == FC.AppToolConfig.Types.subdir:
                PyLog.debug("Child is just a subdir. Let parent build it", child, log_level=5)
                continue
            if apptool.type == FC.AppToolConfig.Types.project:
                PyLog.debug("Child is just a project. Just build it's kids", child, log_level=5)
                continue
            if child in external_exclusions:
                PyLog.debug("Child used by previous external tool. Cannot be used again", child, external_exclusions[child], log_level=5)
                continue
            if tool_chains[tool].uses(apptool):
                ## Since we're used update our children so we only use the relevant ones
                apptool = apptool.clone(apptool._dir)
                apptool.children = [c for c in apptool.children if tool_chains[tool].uses(c)]
                if tool in external_tools:
                    external_exclusions[child] = tool
                PyLog.debug("Users of Tool", tool, child, log_level=8)
                if tool not in usedconfig:
                    usedconfig[tool] = {}
                usedconfig[tool][child] = apptool
    ## From here on, only the used subset matters.
    allconfig = usedconfig
    PyLog.debug("Updated Config", allconfig, log_level=9)
    ## (tool, app name) -> set of prerequisite (tool, app name) keys.
    key_dependencies = {}
    ## Build dependency graph
    digraph = PyGraph.DiGraph()
    edges = set()
    for tool in allconfig:
        for child in allconfig[tool]:
            apptool = allconfig[tool][child]
            name = apptool.name()
            root = apptool.path(DF.source|DF.root)
            t_edges = set()
            vertex_key = (tool, name)
            key_dependencies[vertex_key] = set()
            digraph.addVertex(PyGraph.Vertex(vertex_key, data={"tool":tool, "root":root}))
            ## If we build after a tool we build after all nodes of that tool
            after_tools = set()
            for t in apptool["BUILD_AFTER_TOOLS"]:
                ## Add an edge for each app that uses this tool
                ## if that tool is also used
                if t in allconfig:
                    after_tools |= set([PyGraph.Edge((t, allconfig[t][a].name()), vertex_key, direction=PyGraph.EdgeDirection.LTR) for a in allconfig[t]])
                    key_dependencies[vertex_key] |= set([(t, allconfig[t][a].name()) for a in allconfig[t]])
            PyLog.debug("After tools", vertex_key=vertex_key, build_after=after_tools, log_level=8)
            t_edges |= after_tools
            ## If we build after apps we build after them for each tool
            ## Since we're iterating for each tool we'll get to adding those nodes eventually
            after_apps = []
            for after in apptool["BUILD_AFTER_APPS"]:
                ## We may be told to build after ourself...
                ## Don't do that
                if after == name:
                    PyLog.warning("Attemping to build ourself after ourself... This seems silly please fix that")
                    continue
                if after in allconfig[tool]:
                    after_apps.append(PyGraph.Edge((tool, after), vertex_key, direction=PyGraph.EdgeDirection.LTR))
            PyLog.debug("App and Afters", (tool, name), after_apps, log_level=8)
            t_edges |= set(after_apps)
            key_dependencies[vertex_key] |= set([e.getOther(vertex_key) for e in t_edges])
            ## Add specific requirements
            t_edges |= set([PyGraph.Edge(x, vertex_key, direction=PyGraph.EdgeDirection.LTR) for x in apptool["BUILD_AFTER"]])
            key_dependencies[vertex_key] |= set([x for x in apptool["BUILD_AFTER"]])
            edges |= t_edges
    for edge in edges:
        digraph.addEdge(edge)
    ## Get order and strip out values we want
    taskorders = digraph.topologicalOrder()
    PyLog.debug("Build order", taskorders, log_level=9)
    ## Each command is (key, bound action method, AppToolConfig).
    self.command_list = [(order, getattr(tool_chains[digraph[order]["tool"]], action), allconfig[digraph[order]["tool"]][digraph[order]["root"]]) for order in taskorders]
    self.command_dependencies = key_dependencies
def configure(self, tool_chains):
    ## Walk the project tree (and checked-out dependencies), load the layered
    ## .fishmonger / .fishmonger.<tool> / .fishmonger.app config files, and
    ## build self.allconfig ({tool: {dir: AppToolConfig}}) plus self.children
    ## (parent dir -> [child dirs]). Also classifies each node as
    ## project/app/subdir.
    ##
    ## :param tool_chains: list of toolchain modules.
    ## [Module] -> [ToolChain]
    tool_chains = [t.ToolChain() for t in tool_chains]
    ## [ToolChain] -> {String:ToolChain}
    tool_chains = {t.name() : t for t in tool_chains}
    ## [ToolChain] -> {String:{}}
    allconfig = {t : {} for t in tool_chains}
    ## Get the app directories
    ## NOTE(review): "../<cwd basename>" makes all paths relative to the
    ## parent of the current directory — confirm this is intentional.
    app_dirs = self.findAppDirs(None, "../" + os.getcwd().split("/")[-1], [])
    ## parent dir -> [child_dir]
    children = {}
    ## Examine the directories and determine which tools are used
    ## At the same time link children to parents
    file_config = {}
    env_config = {}
    tool_config = {t : {} for t in tool_chains}
    app_config = {}
    include_dirs = set()
    lib_dirs = set()
    all_dep_dirs = set()
    ## NOTE: app_dirs is extended inside this loop as dependencies are
    ## retrieved, so newly discovered dirs are also processed.
    for (parent, child) in app_dirs:
        include_dirs |= set(PyFind.findAllByPattern("*include*", root=child, dirs_only=True))
        lib_dirs |= set(PyFind.findAllByPattern("*lib*", root=child, dirs_only=True))
        ## NOTE(review): t_env_config/t_app_config are assigned but never read
        ## afterwards — possibly dead code.
        t_env_config = {}
        t_app_config = {}
        if child not in children:
            children[child] = []
        if parent in children:
            children[parent].append(child)
            t_app_config = app_config[parent]
            t_env_config = env_config[parent]
        for tool in tool_chains:
            ## Reuse a cached FileConfig when available; otherwise load the
            ## file, inheriting the parent's config (or tool defaults) as base.
            nenv_config = file_config[os.path.join(child, ".fishmonger")] if os.path.join(child, ".fishmonger") in file_config else PyConfig.FileConfig(file=os.path.join(child, ".fishmonger"), config=env_config[parent].config if parent in env_config else {})
            ## NOTE(review): the cache key uses ".fishmonger" + tool but the
            ## file loaded is ".fishmonger." + tool (extra dot) — confirm.
            ntool_config = file_config[os.path.join(child, ".fishmonger" + tool)] if os.path.join(child, ".fishmonger" + tool) in file_config else PyConfig.FileConfig(file=os.path.join(child, ".fishmonger." + tool), config=tool_config[tool][parent].config if parent in tool_config[tool] else tool_chains[tool].defaults)
            napp_config = file_config[os.path.join(child, ".fishmonger.app")] if os.path.join(child, ".fishmonger.app") in file_config else PyConfig.FileConfig(file=os.path.join(child, ".fishmonger.app"), config=app_config[parent].config if parent in app_config else {})
            env_config[child] = nenv_config
            tool_config[tool][child] = ntool_config
            app_config[child] = napp_config
            src_exts = tool_chains[tool].src_exts if hasattr(tool_chains[tool], "src_exts") else None
            include_exts = tool_chains[tool].include_exts if hasattr(tool_chains[tool], "include_exts") else None
            include_pattern = tool_chains[tool].include_pattern if hasattr(tool_chains[tool], "include_pattern") else None
            app_tool_config = FC.AppToolConfig(
                tool,
                child,
                src_exts,
                include_exts,
                include_pattern,
                nenv_config,
                ntool_config,
                napp_config
            )
            ## Link this node under its parent when the parent is known.
            if parent in allconfig[tool]:
                app_tool_config.parent = allconfig[tool][parent]
                app_tool_config.src_root = allconfig[tool][parent]._dir
                allconfig[tool][parent].children.append(app_tool_config)
            allconfig[tool][child] = app_tool_config
            ## Retrieve external dependencies and queue their dirs for scanning.
            dep_dirs = [(".", self.retrieveCode(app_tool_config["DEP_DIR"], x, skip_update=app_tool_config["SKIP_UPDATE"])) for x in app_tool_config["DEPENDENCIES"]]
            for (ignore, dep_dir) in dep_dirs:
                t_dep_dirs = self.findAppDirs(".", dep_dir, [])
                all_dep_dirs |= set(t_dep_dirs)
            new_dep_dirs = all_dep_dirs - set(app_dirs)
            app_dirs += list(new_dep_dirs)
    for tool in allconfig:
        for child in allconfig[tool]:
            allconfig[tool][child]["INCLUDE_DIRS"] = include_dirs
            allconfig[tool][child]["LIB_DIRS"] = lib_dirs
            ## Update types
            ##for child in allconfig[tool]:
            apptool = allconfig[tool][child]
            if apptool._dir in all_dep_dirs:
                apptool.dependency = True
            ## If we have no children and no src dir we are a subdir
            if len(apptool.children) == 0 and not os.path.isdir(os.path.join(child, "src")):
                apptool.type = FC.AppToolConfig.Types.subdir
            ## If we have no children but have a subdir we're a shallow app
            elif len(apptool.children) == 0 and os.path.isdir(os.path.join(child, "src")):
                apptool.type = FC.AppToolConfig.Types.app
            else:
                count = len(apptool.children)
                for c in children[child]:
                    if os.path.isdir(os.path.join(c, "src")):
                        count -= 1
                ## We have children and all have a src dir; We're a project
                if count == 0:
                    apptool.type = FC.AppToolConfig.Types.project
                ## If we're not a project and not a subdir we're an app
                else:
                    apptool.type = FC.AppToolConfig.Types.app
    self.allconfig = allconfig
    self.children = children
    PyLog.debug("Child map", children, log_level=9)
    PyLog.debug("Generated Config", allconfig, log_level=9)
def installMisc(self, app):
    ## Install the app's var/ directory content into the install tree,
    ## doing nothing when the app has no var/ directory.
    src = app.path(DF.source | DF.root, subdirs=["var"])
    dst = app.path(DF.install | DF.var | DF.absolute)
    if not os.path.isdir(src):
        return
    PyLog.debug("Copying content...")
    PySH.copy(src, dst, force=True)
def getRequiredApps(app, install_dir="."):
    ## Resolve the transitive application dependencies of an Erlang app by
    ## reading the "applications" tuple from its .app file. Results are
    ## memoized in the module-level AppRequirements cache.
    ##
    ## :param app: application name (string).
    ## :param install_dir: first directory searched for "<app>.app" before
    ##                     the standard Erlang library locations.
    ## :returns: dependency-ordered list of app names ending with `app`
    ##           itself; [] when no .app file is found; [app] when the file
    ##           declares no required applications.
    PyLog.increaseIndent()
    global AppRequirements
    seen_apps = {}
    apps = []
    PyLog.debug("Find", app)
    ## If we already exist just return
    if app in AppRequirements:
        PyLog.debug("Already exist!")
        PyLog.decreaseIndent()
        return AppRequirements[app]
    ## Search the install dir first, then the standard Erlang lib locations.
    app_file = None
    for search_dir in [install_dir, "/usr/lib/erlang", "/usr/lib64/erlang", "/usr/local/lib/erlang"]:
        PyLog.debug("Searching for .app file", app=app, search_dir=search_dir, log_level=5)
        app_file = PyFind.find("*/" + app + ".app", search_dir)
        PyLog.debug("Found .app file", app_file=app_file, log_level=5)
        if app_file:
            break
    if not app_file:
        PyLog.debug("Doesnt exist!")
        PyLog.decreaseIndent()
        return apps
    doc = PyErl.parse_file(app_file)
    tuples = doc.getElementsByTagName("tuple")
    ## FIX: local was named `tuple`, shadowing the builtin; renamed.
    app_tuple = None
    ## Find Application tuple
    for ttuple in tuples:
        if ttuple[0].to_string() == "applications":
            app_tuple = ttuple
            break
    ## If no apps are specified we can just start
    if app_tuple is None:
        PyLog.debug("No apps required")
        PyLog.decreaseIndent()
        return [app]
    all_dapps = []
    ## For each required app
    for dapp in app_tuple[1]:
        ## Get that apps required apps
        all_dapps += getRequiredApps(dapp.to_string(), install_dir)
    ## De-duplicate while preserving dependency order.
    for t_app in all_dapps:
        if t_app in seen_apps:
            continue
        seen_apps[t_app] = 1
        apps.append(t_app)
    apps.append(app)
    AppRequirements[app] = apps
    PyLog.debug("APP USE", app=app, apps=apps)
    PyLog.debug("Complete")
    PyLog.decreaseIndent()
    return apps