def getCompiledSize(self, fileId, variants, optimize=None, recompile=True):
    """Return the size (length of the compiled string) of class <fileId>.

    fileId    -- class id whose compiled size is wanted
    variants  -- variant set; part of the cache key
    optimize  -- list of optimization names; defaults to this object's setting
    recompile -- if False, return -1 on a cache miss instead of compiling

    The result is memoized under a key derived from path, variants and
    optimizations.
    """
    if optimize == None:
        optimize = self._optimize # use object setting as default
    fileEntry = self._classes[fileId]
    filePath = fileEntry["path"]
    variantsId = util.toString(variants)
    # the cache key only carries an optimize part when optimizations apply
    if optimize:
        optimizeId = self.generateOptimizeId(optimize)
        cacheId = "compiledsize-%s-%s-%s" % (filePath, variantsId, optimizeId)
    else:
        cacheId = "compiledsize-%s-%s" % (filePath, variantsId)
    # readmulti returns a 2-tuple; the second element is unused here
    # (presumably a cache modification time -- confirm against the cache API)
    size, _ = self._cache.readmulti(cacheId, filePath)
    if size != None:
        return size
    if recompile == False:
        return -1
    self._console.debug("Computing compiled size: %s..." % fileId)
    #tree = self._treeLoader.getTree(fileId, variants)
    #compiled = self.compileTree(tree)
    compiled = self.getCompiled(fileId, variants, optimize, format=True) # TODO: format=True is a hack here, since it is most likely
    size = len(compiled)
    self._cache.writemulti(cacheId, size)
    return size
def getCode(self, compOptions, treegen=treegenerator, featuremap={}):
    """Return this class's code as a string, according to compile options.

    compOptions -- compile options object (.optimize, .variantset, .format)
    treegen     -- tree generator module (unused in this revision)
    featuremap  -- passed through to self.optimize(); NOTE: mutable default,
                  shared across calls -- safe only while never mutated

    Without optimizations the source file is returned (almost) verbatim;
    otherwise an optimized tree is serialized, with caching.
    """
    # source versions
    if not compOptions.optimize:
        compiled = filetool.read(self.path)
        if compOptions.format and compiled[ -1:] != "\n": # assure trailing \n
            compiled += '\n'
    # compiled versions
    else:
        optimize = compOptions.optimize
        variants = compOptions.variantset
        format_ = compOptions.format
        classVariants = self.classVariants()
        # relevantVariants is the intersection between the variant set of this job
        # and the variant keys actually used in the class
        relevantVariants = self.projectClassVariantsToCurrent(classVariants, variants)
        variantsId = util.toString(relevantVariants)
        optimizeId = self._optimizeId(optimize)
        cache = self.context["cache"]
        cacheId = "compiled-%s-%s-%s-%s" % (self.path, variantsId, optimizeId, format_)
        compiled, _ = cache.read(cacheId, self.path)
        if compiled == None:
            tree = self.optimize(None, optimize, variants, featuremap)
            compiled = self.serializeTree(tree, optimize, format_)
            # NOTE(review): "statics"-optimized output is deliberately not
            # cached -- presumably it depends on job-wide state; confirm
            if not "statics" in optimize:
                cache.write(cacheId, compiled)
    return compiled
def tree(self, treegen=treegenerator, force=False):
    """Return the unoptimized syntax tree of this class, parsing on cache miss.

    treegen -- tree generator module; its .tag is part of the cache key
    force   -- re-parse even if a cached tree exists

    NOTE(review): this revision ends right after tree creation -- no cache
    write or explicit return is visible, so on a cache miss the freshly built
    tree is not returned; possibly truncated, verify against the full file.
    """
    cache = self.context['cache']
    console = self.context['console']
    tradeSpaceForSpeed = False # Caution: setting this to True seems to make builds slower, at least on some platforms!?
    cacheId = "tree%s-%s-%s" % (treegen.tag, self.path, util.toString({}))
    self.treeId = cacheId

    # Lookup for unoptimized tree
    tree, _ = cache.read(cacheId, self.path, memory=tradeSpaceForSpeed)

    # Tree still undefined?, create it!
    if tree == None or force:
        console.debug("Parsing file: %s..." % self.id)
        console.indent()
        fileContent = filetool.read(self.path, self.encoding)
        # prefer the file path for error messages, fall back to the class id
        fileId = self.path if self.path else self.id
        try:
            tokens = tokenizer.parseStream(fileContent, self.id)
        except SyntaxException, e:
            # add file info
            e.args = (e.args[0] + "\nFile: %s" % fileId, ) + e.args[1:]
            raise e
        console.outdent()
        console.debug("Generating tree: %s..." % self.id)
        console.indent()
        try:
            tree = treegen.createSyntaxTree(tokens, fileId)
        except SyntaxException, e:
            # add file info
            e.args = (e.args[0] + "\nFile: %s" % fileId, ) + e.args[1:]
            raise e
def messageStrings(self, variants):
    """Extract translatable message strings from this class.

    variants -- job variant set; only the variants this class actually uses
                contribute to the cache key

    Returns (messages, cached) on a cache hit.  Raises RuntimeError when
    extraction fails.  NOTE(review): on a cache miss this revision ends after
    extraction, without a visible return or cache update -- possibly
    truncated, verify against the full file.
    """
    # this duplicates codef from Locale.getTranslation
    classVariants = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variants)
    variantsId = util.toString(relevantVariants)
    cacheId = "messages-%s" % (variantsId,)
    cached = True
    console = self.context['console']

    #messages, _ = cache.readmulti(cacheId, self.path)
    # message strings live in the per-class cache, not the global cache
    classInfo, cacheModTime = self._getClassCache()
    messages = classInfo[cacheId] if cacheId in classInfo else None
    if messages != None:
        return messages, cached

    console.debug("Looking for message strings: %s..." % self.id)
    console.indent()
    cached = False
    tree = self.tree()
    try:
        messages = self._findTranslationBlocks(tree, [])
    except NameError, detail:
        raise RuntimeError("Could not extract message strings from %s!\n%s" % (self.id, detail))
def tree(self, treegen=treegenerator, force=False):
    """Return the unoptimized syntax tree of this class, parsing on cache miss.

    treegen -- tree generator module; its .tag is part of the cache key
    force   -- re-parse even if a cached tree exists

    NOTE(review): this revision ends in the parse error handler -- no cache
    write or explicit return is visible after tree creation; possibly
    truncated, verify against the full file.
    """
    cache = self.context['cache']
    console = self.context['console']
    tradeSpaceForSpeed = False # Caution: setting this to True seems to make builds slower, at least on some platforms!?
    cacheId = "tree%s-%s-%s" % (treegen.tag, self.path, util.toString({}))
    self.treeId = cacheId

    # Lookup for unoptimized tree
    tree, _ = cache.read(cacheId, self.path, memory=tradeSpaceForSpeed)

    # Tree still undefined?, create it!
    if tree == None or force:
        console.debug("Parsing file: %s..." % self.id)
        console.indent()

        # Tokenize
        fileContent = filetool.read(self.path, self.encoding)
        # prefer the file path for error messages, fall back to the class id
        fileId = self.path if self.path else self.id
        try:
            tokens = tokenizer.Tokenizer().parseStream(fileContent, self.id)
        except SyntaxException, e:
            # add file info
            e.args = (e.args[0] + "\nFile: %s" % fileId,) + e.args[1:]
            raise e

        # Parse
        try:
            tree = treegen.createFileTree(tokens, fileId)
        except SyntaxException, e:
            # add file info
            e.args = (e.args[0] + "\nFile: %s" % fileId,) + e.args[1:]
            raise
def tree(self, treegen=treegenerator, force=False):
    """Return the unoptimized syntax tree of this class.

    treegen -- tree generator module; its .tag is part of the cache key
    force   -- re-parse even if a cached tree exists

    Parses and caches the tree on a miss; the cache key is stored on the
    instance as self.treeId.
    """
    cache = self.context["cache"]
    console = self.context["console"]
    tradeSpaceForSpeed = (
        False
    )  # Caution: setting this to True seems to make builds slower, at least on some platforms!?
    cacheId = "tree%s-%s-%s" % (treegen.tag, self.path, util.toString({}))
    self.treeId = cacheId

    # Lookup for unoptimized tree
    tree, _ = cache.read(cacheId, self.path, memory=tradeSpaceForSpeed)

    # Tree still undefined?, create it!
    if tree == None or force:
        console.debug("Parsing file: %s..." % self.id)
        console.indent()
        fileContent = filetool.read(self.path, self.encoding)
        tokens = tokenizer.parseStream(fileContent, self.id)
        console.outdent()
        console.debug("Generating tree: %s..." % self.id)
        console.indent()
        tree = treegen.createSyntaxTree(tokens)  # allow exceptions to propagate

        # store unoptimized tree
        # print "Caching %s" % cacheId
        cache.write(cacheId, tree, memory=tradeSpaceForSpeed, writeToFile=True)

        console.outdent()
    return tree
def messageStrings(self, variants):
    """Extract translatable message strings from this class.

    variants -- job variant set; only the variants this class actually uses
                contribute to the cache key

    Returns (messages, cached) on a cache hit.  Raises RuntimeError when
    extraction fails.  NOTE(review): on a cache miss this revision ends after
    extraction, without a visible return or cache update -- possibly
    truncated, verify against the full file.
    """
    # this duplicates codef from Locale.getTranslation
    classVariants = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variants)
    variantsId = util.toString(relevantVariants)
    cacheId = "messages-%s" % (variantsId, )
    cached = True
    console = self.context['console']

    #messages, _ = cache.readmulti(cacheId, self.path)
    # message strings live in the per-class cache, not the global cache
    classInfo, cacheModTime = self._getClassCache()
    messages = classInfo[cacheId] if cacheId in classInfo else None
    if messages != None:
        return messages, cached

    console.debug("Looking for message strings: %s..." % self.id)
    console.indent()
    cached = False
    tree = self.tree()
    try:
        messages = self._findTranslationBlocks(tree, [])
    except NameError, detail:
        raise RuntimeError(
            "Could not extract message strings from %s!\n%s" % (self.id, detail))
def tree(self, treegen=treegenerator, force=False):
    """Return the unoptimized syntax tree of this class.

    treegen -- tree generator module; its .tag is part of the cache key
    force   -- re-parse even if a cached tree exists

    Parses and caches the tree on a miss; the cache key is stored on the
    instance as self.treeId.
    """
    cache = self.context['cache']
    console = self.context['console']
    tradeSpaceForSpeed = False # Caution: setting this to True seems to make builds slower, at least on some platforms!?
    cacheId = "tree%s-%s-%s" % (treegen.tag, self.path, util.toString({}))
    self.treeId = cacheId

    # Lookup for unoptimized tree
    tree, _ = cache.read(cacheId, self.path, memory=tradeSpaceForSpeed)

    # Tree still undefined?, create it!
    if tree == None or force:
        console.debug("Parsing file: %s..." % self.id)
        console.indent()
        fileContent = filetool.read(self.path, self.encoding)
        tokens = tokenizer.parseStream(fileContent, self.id)
        console.outdent()
        console.debug("Generating tree: %s..." % self.id)
        console.indent()
        tree = treegen.createSyntaxTree(tokens)  # allow exceptions to propagate

        # store unoptimized tree
        #print "Caching %s" % cacheId
        cache.write(cacheId, tree, memory=tradeSpaceForSpeed)

        console.outdent()
    return tree
def getAssets(self, assetMacros=None):
    """Return the list of AssetHint objects for this class, memoized.

    assetMacros -- mapping of asset macro names to values; defaults to an
                   empty mapping.  (Changed from a mutable `{}` default to a
                   None sentinel so no dict object is shared across calls.)

    The memoization key is a hash of the macro map, because the same class
    can be built with different macros in one generator run.
    """
    if assetMacros is None:
        assetMacros = {}
    # Memoizing needs assetMacros in the key, otherwise you get wrong
    # results with multiple builds in one generator run.
    macroskey = util.toString(assetMacros)
    macroskey = sha_construct(macroskey).hexdigest()
    if macroskey not in self._assetRegex:
        # prepare a regex encompassing all asset hints, asset macros resolved
        classAssets = self.getHints()['assetDeps'][:]
        iresult = []  # [AssetHint]
        for res in classAssets:
            # expand file glob into regexp
            res = re.sub(r'\*', ".*", res)
            # expand macros
            if '${' in res:
                expres = self._expandMacrosInMeta(assetMacros, res)
            else:
                expres = [res]
            # collect resulting asset objects
            for e in expres:
                assethint = AssetHint(res)
                assethint.clazz = self
                assethint.expanded = e
                assethint.regex = re.compile(e)
                if assethint not in iresult:
                    iresult.append(assethint)
        self._assetRegex[macroskey] = iresult

    return self._assetRegex[macroskey]
def getCompiledSize(self, fileId, variants, optimize=None, recompile=True):
    """Return the size (length of the compiled string) of class <fileId>.

    fileId    -- class id whose compiled size is wanted
    variants  -- variant set; part of the cache key
    optimize  -- list of optimization names; defaults to this object's setting
    recompile -- if False, return -1 on a cache miss instead of compiling

    NOTE(review): readmulti's result is used directly here, while other code
    in this file unpacks it as a (value, modtime) tuple -- confirm which cache
    API revision this matches; with a tuple result the None check below would
    never trigger.
    """
    if optimize == None:
        optimize = self._optimize # use object setting as default
    fileEntry = self._classes[fileId]
    filePath = fileEntry["path"]
    variantsId = util.toString(variants)
    # the cache key only carries an optimize part when optimizations apply
    if optimize:
        optimizeId = self.generateOptimizeId(optimize)
        cacheId = "compiledsize-%s-%s-%s" % (filePath, variantsId, optimizeId)
    else:
        cacheId = "compiledsize-%s-%s" % (filePath, variantsId)
    size = self._cache.readmulti(cacheId, filePath)
    if size != None:
        return size
    if recompile == False:
        return -1
    self._console.debug("Computing compiled size: %s..." % fileId)
    #tree = self._treeLoader.getTree(fileId, variants)
    #compiled = self.compileTree(tree)
    compiled = self.getCompiled(fileId, variants, optimize, format=True) # TODO: format=True is a hack here, since it is most likely
    size = len(compiled)
    self._cache.writemulti(cacheId, size)
    return size
def clearTreeCache(self, variantSet, treegen=treegenerator):
    """Drop this class's cached syntax tree for the given variant set.

    variantSet -- the job's variant set; narrowed to the variants this class
                  actually uses so the key matches the one used at write time
    treegen    -- tree generator module whose .tag is part of the key
    """
    usedVariants = self.classVariants()
    activeVariants = self.projectClassVariantsToCurrent(usedVariants, variantSet)
    treeCacheKey = "tree%s-%s-%s" % (treegen.tag, self.path, util.toString(activeVariants))
    cacheObj = self.context['cache']
    cacheObj.remove(treeCacheKey)
    return
def getAssets(self, assetMacros=None):
    """Return the list of AssetHint objects for this class, memoized.

    assetMacros -- mapping of asset macro names to values; defaults to an
                   empty mapping.  (Changed from a mutable `{}` default to a
                   None sentinel so no dict object is shared across calls.)

    The memoization key is a hash of the macro map, because the same class
    can be built with different macros in one generator run.
    """
    if assetMacros is None:
        assetMacros = {}
    # Memoizing needs assetMacros in the key, otherwise you get wrong
    # results with multiple builds in one generator run.
    macroskey = util.toString(assetMacros)
    macroskey = sha_construct(macroskey).hexdigest()
    if macroskey not in self._assetRegex:
        # prepare a regex encompassing all asset hints, asset macros resolved
        classAssets = self.getHints()['assetDeps'][:]
        iresult = []  # [AssetHint]
        for res in classAssets:
            # expand file glob into regexp
            res = re.sub(r'\*', ".*", res)
            # expand macros
            if '${' in res:
                expres = self._expandMacrosInMeta(assetMacros, res)
            else:
                expres = [res]
            # collect resulting asset objects
            for e in expres:
                assethint = AssetHint(res)
                assethint.clazz = self
                assethint.expanded = e
                assethint.regex = re.compile(e)
                if assethint not in iresult:
                    iresult.append(assethint)
        self._assetRegex[macroskey] = iresult

    return self._assetRegex[macroskey]
def getCode(self, compOptions, treegen=treegenerator, featuremap={}):
    """Return this class's code as a string, according to compile options.

    compOptions -- compile options object (.optimize, .variantset, .format)
    treegen     -- tree generator module (unused in this revision)
    featuremap  -- passed through to self.optimize(); NOTE: mutable default,
                  shared across calls -- safe only while never mutated

    Without optimizations the source file is returned with a guaranteed
    trailing newline; otherwise an optimized tree is serialized, with caching.
    """
    # source versions
    if not compOptions.optimize:
        compiled = filetool.read(self.path)
        # assure trailing \n (e.g. to utilise ASI)
        if compiled[-1:] != "\n":
            compiled += '\n'
    # compiled versions
    else:
        optimize = compOptions.optimize
        variants = compOptions.variantset
        format_ = compOptions.format
        classVariants = self.classVariants()
        # relevantVariants is the intersection between the variant set of this job
        # and the variant keys actually used in the class
        relevantVariants = self.projectClassVariantsToCurrent(classVariants, variants)
        variantsId = util.toString(relevantVariants)
        optimizeId = self._optimizeId(optimize)
        cache = self.context["cache"]
        cacheId = "compiled-%s-%s-%s-%s" % (self.path, variantsId, optimizeId, format_)
        compiled, _ = cache.read(cacheId, self.path)
        if compiled == None:
            tree = self.optimize(None, optimize, variants, featuremap)
            compiled = self.serializeTree(tree, optimize, format_)
            # NOTE(review): "statics"-optimized output is deliberately not
            # cached -- presumably it depends on job-wide state; confirm
            if not "statics" in optimize:
                cache.write(cacheId, compiled)
    return compiled
def messageStrings(self, variants):
    """Extract translatable message strings of this class, with caching.

    variants -- variant set used to build/select the class tree

    Returns the list of message entries found in the tree.
    NOTE(review): `cache`, `console` and `projectClassVariantsToCurrent` are
    used unqualified here -- presumably module-level or outer-scope names in
    this revision; confirm in the full file.
    """
    # this duplicates codef from Locale.getTranslation
    classVariants = self.classVariants()
    relevantVariants = projectClassVariantsToCurrent(classVariants, variants)
    variantsId = util.toString(relevantVariants)
    cacheId = "messages-%s-%s" % (self.path, variantsId)
    messages = cache.readmulti(cacheId, self.path)
    if messages != None:
        return messages

    console.debug("Looking for message strings: %s..." % self.id)
    console.indent()
    tree = self.tree(variants)

    #try:
    if True:
        messages = self._findTranslationBlocks(tree, [])
    #except NameError, detail:
    #    raise RuntimeError("Could not extract message strings from %s!\n%s" % (self.id, detail))

    if len(messages) > 0:
        console.debug("Found %s message strings" % len(messages))

    console.outdent()
    cache.writemulti(cacheId, messages)

    return messages
def getTreeCacheId(optimize=[], variantSet={}):
    """Compute the cache key for this class's (optimized) syntax tree.

    optimize   -- list of optimization names folded into the key
    variantSet -- job variant set, narrowed to the variants this class uses

    NOTE(review): uses `self` from the enclosing scope -- presumably a closure
    inside a method; confirm in the full file.
    """
    variantsInClass = self.classVariants()
    activeVariants = self.projectClassVariantsToCurrent(variantsInClass, variantSet)
    keyParts = (
        treegenerator.tag,  # TODO: hard-coded treegen.tag
        self.path,
        self._optimizeId(optimize),
        util.toString(activeVariants),
    )
    return "tree%s-%s-%s-%s" % keyParts
def getTreeCacheId(optimize=[], variantSet={}):
    """Return the cache id under which this class's syntax tree is stored.

    optimize   -- list of optimization names folded into the key
    variantSet -- job variant set, narrowed to the variants this class uses

    NOTE(review): uses `self` from the enclosing scope -- presumably a closure
    inside a method; confirm in the full file.
    """
    cv = self.classVariants()
    rv = self.projectClassVariantsToCurrent(cv, variantSet)
    tag = treegenerator.tag  # TODO: hard-coded treegen.tag
    optId = self._optimizeId(optimize)
    varId = util.toString(rv)
    return "tree%s-%s-%s-%s" % (tag, self.path, optId, varId)
def checkCache(self, fileId, variants, optimize, format=False):
    """Look up the compiled version of <fileId> in the cache.

    fileId   -- class id to look up
    variants -- variant set; part of the cache key
    optimize -- optimization list; part of the cache key
    format   -- formatting flag; part of the cache key

    Returns (cacheId, compiled), where compiled is whatever the cache's
    read() yields for the key (the payload, or a miss indicator).
    """
    path = self._classes[fileId]["path"]
    key = "compiled-%s-%s-%s-%s" % (
        path,
        util.toString(variants),
        self.generateOptimizeId(optimize),
        format,
    )
    payload = self._cache.read(key, path)
    return key, payload
def tree(self, variantSet={}):
    """Return this class's syntax tree, variant-optimized for <variantSet>.

    variantSet -- job variant set; narrowed to the variants the class uses.
                  NOTE: mutable default, shared across calls -- safe only
                  while never mutated.

    On a miss the source tree is (re)built, variant-optimized in place, and
    cached under a variant-specific key.
    """
    context = self.context
    cache = context['cache']
    tradeSpaceForSpeed = False # Caution: setting this to True seems to make builds slower, at least on some platforms!?

    # Construct the right cache id
    unoptCacheId = "tree-%s-%s" % (self.path, util.toString({}))

    classVariants = []
    tree = None
    classVariants = self.classVariants(generate=False) # just check the cache
    if classVariants == None:
        # no cached variant info: parse the source tree to discover variants
        tree = self._getSourceTree(unoptCacheId, tradeSpaceForSpeed)
        classVariants= self._variantsFromTree(tree)

    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variantSet)
    cacheId = "tree-%s-%s" % (self.path, util.toString(relevantVariants))

    # Get the right tree to return
    if cacheId == unoptCacheId and tree: # early return optimization
        return tree

    opttree, cacheMod = cache.read(cacheId, self.path, memory=tradeSpaceForSpeed)
    if not opttree:
        # start from source tree
        if tree:
            opttree = tree
        else:
            opttree = self._getSourceTree(unoptCacheId, tradeSpaceForSpeed)
        # do we have to optimze?
        if cacheId == unoptCacheId:
            return opttree
        else:
            context["console"].debug("Selecting variants: %s..." % self.id)
            context["console"].indent()
            variantoptimizer.search(opttree, variantSet, self.id)
            context["console"].outdent()
            # store optimized tree
            #print "Caching %s" % cacheId
            cache.write(cacheId, opttree, memory=tradeSpaceForSpeed, writeToFile=True)

    return opttree
def checkCache(self, fileId, variants, optimize, format=False):
    """Look up the compiled version of <fileId> in the cache.

    fileId   -- class id to look up
    variants -- job variant set; narrowed to the variants the class uses
    optimize -- optimization list; part of the cache key
    format   -- formatting flag; part of the cache key

    Returns (cacheId, compiled); compiled is the cached payload or a miss
    indicator, per the cache's read() contract.
    """
    filePath = self._classes[fileId]["path"]
    # only the variants this class actually references matter for the key
    usedVariants = self._classesObj[fileId].classVariants()
    activeVariants = Class.projectClassVariantsToCurrent(usedVariants, variants)
    key = "compiled-%s-%s-%s-%s" % (
        filePath,
        util.toString(activeVariants),
        self.generateOptimizeId(optimize),
        format,
    )
    payload, _ = self._cache.read(key, filePath)
    return key, payload
def _checkCache(self, fileId, variants, optimize, format=False):
    """Look up the compiled version of <fileId> in the cache.

    fileId   -- class id to look up
    variants -- job variant set; narrowed to the variants the class uses
    optimize -- optimization list; part of the cache key
    format   -- formatting flag; part of the cache key

    Returns (cacheId, compiled); compiled is the cached payload or a miss
    indicator, per the cache's read() contract.
    """
    classObj = self._classes[fileId]
    srcPath = classObj.path
    # only the variants this class actually references matter for the key
    activeVariants = Class.projectClassVariantsToCurrent(classObj.classVariants(), variants)
    cacheKey = "compiled-%s-%s-%s-%s" % (
        srcPath,
        util.toString(activeVariants),
        self.generateOptimizeId(optimize),
        format,
    )
    cached, _ = self._cache.read(cacheKey, srcPath)
    return cacheKey, cached
def optimizeTree(self, variantSet, scriptClasses, treegen=treegenerator):
    """Variant-optimize this class's tree; called for its side effects only.

    variantSet    -- job variant set used for the optimization
    scriptClasses -- class variants of the whole script, attached to the
                     compile options
    treegen       -- tree generator module whose .tag is part of the key
    """
    activeVariants = self.projectClassVariantsToCurrent(self.classVariants(), variantSet)
    treeKey = "tree%s-%s-%s" % (treegen.tag, self.path, util.toString(activeVariants))
    options = CompileOptions(optimize=["variants"], variants=variantSet)
    options.allClassVariants = scriptClasses
    optimizedTree = self.optimizeEnvironmentClass(options)
    ## side effect: leave the modified tree for qx.core.Environmet in the
    ## (memory) cache!
    self.context["cache"].write(treeKey, optimizedTree, memory=True, writeToFile=False)
    ## side effect: re-calculate the transitive dependencies of
    ## qx.core.Environment!
    _ = self.dependencies(variantSet, force=True)
    return
def getTree(self, fileId, variants=None):
    """Return the syntax tree for <fileId>, optionally variant-optimized.

    fileId   -- class id to load
    variants -- variant set; when given, the variant optimizer is applied and
                the result cached under a variant-specific key

    NOTE(review): the cache keys mix filePath ("tree-%s" % filePath above)
    and fileId ("tree-%s" % fileId below) -- the unoptimized tree written
    here will not be found by the no-variants lookup above unless
    filePath == fileId; verify which is intended.
    """
    fileEntry = self._classes[fileId]
    filePath = fileEntry["path"]

    if variants:
        cacheId = "tree-%s-%s" % (filePath, util.toString(variants))
    else:
        cacheId = "tree-%s" % filePath

    tradeSpaceForSpeed = False # Caution: setting this to True seems to make builds slower, at least on some platforms!?

    tree = self._cache.read(cacheId, filePath, memory=tradeSpaceForSpeed)
    if tree != None:
        return tree

    # Lookup for unoptimized tree
    if variants != None:
        tree = self._cache.read("tree-%s" % fileId, filePath, memory=tradeSpaceForSpeed)

    # Tree still undefined?, create it!
    if tree == None:
        self._console.debug("Parsing file: %s..." % fileId)
        self._console.indent()
        fileContent = filetool.read(fileEntry["path"], fileEntry["encoding"])
        tokens = tokenizer.parseStream(fileContent, fileId)
        self._console.outdent()
        self._console.debug("Generating tree: %s..." % fileId)
        self._console.indent()
        tree = treegenerator.createSyntaxTree(tokens)  # allow exceptions to propagate

        # store unoptimized tree
        self._cache.write("tree-%s" % fileId, tree, memory=tradeSpaceForSpeed, writeToFile=True)

        self._console.outdent()

    # Call variant optimizer
    if variants != None:
        self._console.debug("Selecting variants: %s..." % fileId)
        self._console.indent()
        variantoptimizer.search(tree, variants, fileId)
        self._console.outdent()
        # store optimized tree
        self._cache.write(cacheId, tree, memory=tradeSpaceForSpeed, writeToFile=True)

    return tree
def _getCompiled(self, compOptions):
    """Serialize this class's (optimized) tree to code, with caching.

    compOptions -- compile options object (.optimize, .variantset, .format)

    Returns the serialized code string.  For a pure "comments" optimization
    the pretty-printer is used; otherwise the condensed Packer output.
    """

    ##
    # Interface to ecmascript.backend
    def serializeCondensed(tree, format_=False):
        result = [u'']
        result = Packer().serializeNode(tree, None, result, format_)
        return u''.join(result)

    def serializeFormatted(tree):
        # provide minimal pretty options
        # (a bare function object is used as an attribute container here)
        def options(): pass
        pretty.defaultOptions(options)
        options.prettypCommentsBlockAdd = False  # turn off comment filling

        result = [u'']
        result = pretty.prettyNode(tree, options, result)
        return u''.join(result)

    # ----------------------------------------------------------------------
    optimize = compOptions.optimize
    variants = compOptions.variantset
    format_ = compOptions.format

    classVariants = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variants)
    variantsId = util.toString(relevantVariants)
    optimizeId = self._optimizeId(optimize)
    cache = self.context["cache"]

    # Caution: Sharing cache id with TreeCompiler
    cacheId = "compiled-%s-%s-%s-%s" % (self.path, variantsId, optimizeId, format_)
    compiled, _ = cache.read(cacheId, self.path)

    if compiled == None:
        tree = self.tree(variants)
        tree = self.optimize(tree, optimize)
        if optimize == ["comments"]:
            compiled = serializeFormatted(tree)
            if compiled[-1:] != "\n": # assure trailing \n
                compiled += '\n'
        else:
            compiled = serializeCondensed(tree, format_)
        cache.write(cacheId, compiled)

    return compiled
def optimizeTree(self, variantSet, scriptClasses, treegen=treegenerator):
    """Variant-optimize this class's tree; called for its side effects only.

    variantSet    -- job variant set used for the optimization
    scriptClasses -- class variants of the whole script, attached to the
                     compile options
    treegen       -- tree generator module whose .tag is part of the key
    """
    narrowed = self.projectClassVariantsToCurrent(self.classVariants(), variantSet)
    cacheKey = "tree%s-%s-%s" % (treegen.tag, self.path, util.toString(narrowed))
    opts = CompileOptions(optimize=["variants"], variants=variantSet)
    opts.allClassVariants = scriptClasses
    resultTree = self.optimizeEnvironmentClass(opts)
    ## side effect: leave the modified tree for qx.core.Environmet in the
    ## (memory) cache!
    self.context['cache'].write(cacheKey, resultTree, memory=True, writeToFile=False)
    ## side effect: re-calculate the transitive dependencies of
    ## qx.core.Environment!
    _ = self.dependencies(variantSet, force=True)
    return
def sortClassesRecurser(classId, classListSorted, path): if classId in classListSorted: return # reading dependencies if classId == "qx.core.Environment": envObj = self._classesObj["qx.core.Environment"] envTreeId = "tree-%s-%s" % (envObj.path, util.toString({})) # TODO: {} is a temp. hack self._cache.remove( envTreeId ) # clear pot. memcache, so already (string) optimized tree is not optimized again (e.g. with Demobrowser) deps, cached = self.getCombinedDeps(classId, variants, buildType) if self._console.getLevel() is "info": self._console.dot("%s" % "." if cached else "*") # path is needed for recursion detection if not classId in path: path.append(classId) # process loadtime requirements for dep in deps["load"]: dep_name = dep.name if dep_name in classList and not dep_name in classListSorted: if dep_name in path: self._console.warn("Detected circular dependency between: %s and %s" % (classId, dep_name)) self._console.indent() self._console.debug("currently explored dependency path: %r" % path) self._console.outdent() raise RuntimeError("Circular class dependencies") else: sortClassesRecurser(dep_name, classListSorted, path) if not classId in classListSorted: # remove element from path path.remove(classId) # print "Add: %s" % classId classListSorted.append(classId) return
def getTree(self, fileId, variants=None):
    """Return the syntax tree for <fileId>, parsing and caching on a miss.

    fileId   -- class id to load
    variants -- variant set; used in the cache key when given

    NOTE(review): this revision ends right after the "Selecting variants"
    logging -- no variant optimization or return is visible; possibly
    truncated, and the nesting of the trailing statements is ambiguous in
    this view -- verify against the full file.
    """
    fileEntry = self._classes[fileId]
    filePath = fileEntry["path"]
    if variants:
        cacheId = "tree-%s-%s" % (filePath, util.toString(variants))
    else:
        cacheId = "tree-%s" % filePath

    tree = self._cache.read(cacheId, filePath)
    if tree != None:
        return tree

    # Lookup for unoptimized tree
    if variants != None:
        tree = self._cache.read("tree-%s" % fileId, filePath)

    # Tree still undefined?, create it!
    if tree == None:
        self._console.debug("Parsing file: %s..." % fileId)
        self._console.indent()
        fileContent = filetool.read(fileEntry["path"], fileEntry["encoding"])
        tokens = tokenizer.parseStream(fileContent, fileId)
        self._console.outdent()
        self._console.debug("Generating tree: %s..." % fileId)
        self._console.indent()
        try:
            tree = treegenerator.createSyntaxTree(tokens)
        except treegenerator.SyntaxException, detail:
            self._console.error("%s" % detail)
            sys.exit(1)
        self._console.outdent()

    self._console.debug("Selecting variants: %s..." % fileId)
    self._console.indent()
def getTranslation(self, fileId, variants):
    """Extract translation strings from class <fileId>, with caching.

    fileId   -- class id to scan
    variants -- variant set; part of the cache key and tree selection

    NOTE(review): this revision ends inside the error handler -- no visible
    return or cache write for a freshly computed result; possibly truncated,
    verify against the full file.
    """
    fileEntry = self._classes[fileId]
    filePath = fileEntry["path"]
    variantsId = util.toString(variants)
    cacheId = "translation-%s-%s" % (filePath, variantsId)

    translation = self._cache.readmulti(cacheId, filePath)
    if translation != None:
        return translation

    self._console.debug("Looking for translation strings: %s..." % fileId)
    self._console.indent()

    tree = self._treeLoader.getTree(fileId, variants)

    try:
        translation = self._findTranslationBlocks(tree, [])
    except NameError, detail:
        self._console.error("Could not extract translation from %s!" % fileId)
        self._console.error("%s" % detail)
        sys.exit(1)
def getTranslation(self, fileId, variants):
    """Extract translation strings from class <fileId>, with caching.

    fileId   -- class id to scan
    variants -- job variant set; narrowed to the variants the class uses

    Raises RuntimeError when extraction fails.
    NOTE(review): this revision ends after extraction -- no visible return or
    cache write for a freshly computed result; possibly truncated, verify
    against the full file.
    """
    fileEntry = self._classes[fileId]
    filePath = fileEntry["path"]
    classVariants = self._classesObj[fileId].classVariants()
    relevantVariants = Class.projectClassVariantsToCurrent(classVariants, variants)
    variantsId = util.toString(relevantVariants)
    cacheId = "translation-%s-%s" % (filePath, variantsId)

    translation = self._cache.readmulti(cacheId, filePath)
    if translation != None:
        return translation

    self._console.debug("Looking for translation strings: %s..." % fileId)
    self._console.indent()

    #tree = self._treeLoader.getTree(fileId, variants)
    tree = self._classesObj[fileId].tree(variants)

    try:
        translation = self._findTranslationBlocks(tree, [])
    except NameError, detail:
        raise RuntimeError("Could not extract translation from %s!\n%s" % (fileId, detail))
def getDeps(self, fileId, variants):
    """Return the load/run/ignore dependencies of class <fileId>, with caching.

    fileId   -- class id to analyze
    variants -- variant set; part of the cache key and tree selection

    Returns a dict {"load": [...], "run": [...], "ignore": [...],
    "undef": [...]} of DependencyItem lists, combining meta hints
    (#require/#use/#optional/#ignore) with dependencies detected in the
    source tree.  Raises NameError when the class is unknown.
    """
    # find dependencies of class named <fileId> in its code (both meta hints as
    # well as source code)
    def analyzeClassDeps(fileId, variants):
        ## analyze with no variants
        #loadtimeDepsNV = [] # NV = no variants
        #runtimeDepsNV = []
        #undefDepsNV = []
        #tree = self._treeLoader.getTree(fileId, {})
        #self._analyzeClassDepsNode(fileId, tree, loadtimeDepsNV, runtimeDepsNV, undefDepsNV, False, variants)

        # now analyze with variants
        loadtimeDeps = []
        runtimeDeps = []
        undefDeps = []
        tree = self._treeLoader.getTree(fileId, variants)
        self._analyzeClassDepsNode(fileId, tree, loadtimeDeps, runtimeDeps, undefDeps, False, variants)

        ## this should be for *source* version only!
        #if "qx.core.Variant" in loadtimeDepsNV and "qx.core.Variant" not in loadtimeDeps:
        #    loadtimeDeps.append("qx.core.Variant")

        return loadtimeDeps, runtimeDeps, undefDeps

    # Merge meta hints with auto-detected dependencies
    def buildShallowDeps():
        # Notes:
        # load time = before class = require
        # runtime = after class = use
        load = []
        run = []
        ignore = [DependencyItem(x,-1) for x in self._defaultIgnore]
        self._console.debug("Gathering dependencies: %s" % fileId)
        self._console.indent()

        # Read meta data
        meta = self.getMeta(fileId)
        metaLoad = meta.get("loadtimeDeps", [])
        metaRun = meta.get("runtimeDeps" , [])
        metaOptional = meta.get("optionalDeps", [])
        metaIgnore = meta.get("ignoreDeps" , [])

        # Process meta data
        load.extend(DependencyItem(x,-1) for x in metaLoad)
        run.extend(DependencyItem(x,-1) for x in metaRun)
        ignore.extend(DependencyItem(x,-1) for x in metaIgnore)

        # Read content data
        (autoLoad, autoRun, autoWarn) = analyzeClassDeps(fileId, variants)

        # Process content data
        if not "auto-require" in metaIgnore:
            for dep in autoLoad:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in (x.name for x in load):
                    # already hinted via #require -- the hint is redundant
                    self._console.warn("%s: #require(%s) is auto-detected" % (fileId, item))
                else:
                    load.append(dep)

        if not "auto-use" in metaIgnore:
            for dep in autoRun:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in (x.name for x in load):
                    # already a load dependency -- no need to add as run dep
                    pass
                elif item in (x.name for x in run):
                    # already hinted via #use -- the hint is redundant
                    self._console.warn("%s: #use(%s) is auto-detected" % (fileId, item))
                else:
                    run.append(dep)

        self._console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
            'undef'  : autoWarn
        }
        return deps

    # -----------------------------------------------------------------
    if not self._classes.has_key(fileId):
        raise NameError("Could not find class to fulfill dependency: %s" % fileId)

    filePath = self._classes[fileId]["path"]
    cacheId = "deps-%s-%s" % (filePath, util.toString(variants))
    # print "Read from cache: %s" % fileId
    deps = self._cache.readmulti(cacheId, filePath)
    if deps == None:
        deps = buildShallowDeps()
        self._cache.writemulti(cacheId, deps)

    #deps = self._traverseDependencies(deps)
    return deps
def getDeps(self, fileId, variants):
    """Return the load/run/ignore dependencies of class <fileId>, with caching.

    fileId   -- class id to analyze
    variants -- job variant set; narrowed to the variants the class uses for
                the cache key

    Returns a dict {"load": [...], "run": [...], "ignore": [...],
    "undef": [...]} of DependencyItem lists, combining meta hints
    (#require/#use/#optional/#ignore) with dependencies detected in the
    source tree.  Raises NameError when the class is unknown.
    """
    ##
    # Wrapper around the tree recurser, _analyzeClassDepsNode
    def analyzeClassDeps(fileId, variants):
        loadtimeDeps = []
        runtimeDeps = []
        undefDeps = []
        tree = self._classesObj[fileId].tree(variants)
        self._analyzeClassDepsNode(fileId, tree, loadtimeDeps, runtimeDeps, undefDeps, False, variants)
        return loadtimeDeps, runtimeDeps, undefDeps

    ##
    # Handle meta hints and call analyzeClassDeps
    def buildShallowDeps():
        # Notes:
        # load time = before class = require
        # runtime = after class = use
        load = []
        run = []
        ignore = [DependencyItem(x,-1) for x in self._defaultIgnore]
        self._console.debug("Gathering dependencies: %s" % fileId)
        self._console.indent()

        # Read meta data
        meta = self.getMeta(fileId)
        metaLoad = meta.get("loadtimeDeps", [])
        metaRun = meta.get("runtimeDeps" , [])
        metaOptional = meta.get("optionalDeps", [])
        metaIgnore = meta.get("ignoreDeps" , [])

        # Process meta data
        load.extend(DependencyItem(x,-1) for x in metaLoad)
        run.extend(DependencyItem(x,-1) for x in metaRun)
        ignore.extend(DependencyItem(x,-1) for x in metaIgnore)

        # Read content data
        (autoLoad, autoRun, autoWarn) = analyzeClassDeps(fileId, variants)

        # Process content data
        if not "auto-require" in metaIgnore:
            for dep in autoLoad:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in (x.name for x in load):
                    # already hinted via #require -- the hint is redundant
                    self._console.warn("%s: #require(%s) is auto-detected" % (fileId, item))
                else:
                    load.append(dep)

        if not "auto-use" in metaIgnore:
            for dep in autoRun:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in (x.name for x in load):
                    # already a load dependency -- no need to add as run dep
                    pass
                elif item in (x.name for x in run):
                    # already hinted via #use -- the hint is redundant
                    self._console.warn("%s: #use(%s) is auto-detected" % (fileId, item))
                else:
                    run.append(dep)

        self._console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
            'undef'  : autoWarn
        }
        return deps

    # -- Main ---------------------------------------------------------
    # Handle caching and invoke buildShallowDeps
    if fileId not in self._classesObj:
        raise NameError("Could not find class to fulfill dependency: %s" % fileId)

    clazz = self._classesObj[fileId]
    filePath = clazz.path
    classVariants = clazz.classVariants()
    #classVariants = variants.keys() # a do-nothing alternative
    relevantVariants = Class.projectClassVariantsToCurrent(classVariants, variants)
    cacheId = "deps-%s-%s" % (filePath, util.toString(relevantVariants))
    # print "Read from cache: %s" % fileId
    deps = self._cache.readmulti(cacheId, filePath)
    if deps == None:
        deps = buildShallowDeps()
        self._cache.writemulti(cacheId, deps)

    #deps = self._traverseDependencies(deps)
    return deps
def dependencies(self, variantSet, force=False, tree=None):
    """
    Return (deps, cached) for this class: deps is a dict with "load",
    "run", "ignore" lists of DependencyItem; cached is True when the
    result came from the class cache or dependencies.json.

    :param variantSet: variant map; when non-empty the tree is taken
        through the "variants" optimization first
    :param force: recompute even if a fresh cache entry exists
    :param tree: optional pre-built syntax tree; results for a passed-in
        tree are NOT cached
    """
    ##
    # Get deps from meta info and class code, and sort them into
    # load/run/ignore deps.
    #
    # Note:
    #   load time = before class = require
    #   run time  = after class  = use
    def buildShallowDeps(tree=None):

        load   = []
        run    = []
        ignore = [DependencyItem(x, '', "|DefaultIgnoredNamesDynamic|") for x in self.defaultIgnoredNamesDynamic]

        console.debug("Analyzing tree: %s" % self.id)
        console.indent()

        # Read meta data
        meta         = self.getHints()
        metaLoad     = meta.get("loadtimeDeps", [])
        metaRun      = meta.get("runtimeDeps" , [])
        metaOptional = meta.get("optionalDeps", [])
        metaIgnore   = meta.get("ignoreDeps"  , [])
        # optional deps are treated like ignored ones from here on
        metaIgnore.extend(metaOptional)

        # regexify globs in metaignore
        metaIgnore = map(MetaIgnore, metaIgnore)

        # Turn strings into DependencyItems()
        for target,metaHint in ((load,metaLoad), (run,metaRun), (ignore,metaIgnore)):
            for key in metaHint:
                # add all feature checks if requested
                if key == "feature-checks" and metaHint in (metaLoad, metaRun):
                    target.extend(self.getAllEnvChecks(-1, metaHint==metaLoad))
                # turn an entry into a DependencyItem
                elif isinstance(key, types.StringTypes):
                    sig = key.split('#',1)
                    className = sig[0]
                    attrName  = sig[1] if len(sig)>1 else ''
                    target.append(DependencyItem(className, attrName, self.id, "|hints|"))

        # Read source tree data
        if not tree:
            if variantSet: # a filled variantSet map means that "variants" optimization is wanted
                tree = self.optimize(None, ["variants"], variantSet)
            else:
                tree = self.tree()

        # do lint checking here, as we have a classList ("ClassesAll") to check globals against
        if True:
            # construct parse-level lint options
            opts = lint.defaultOptions()
            opts.library_classes = ClassesAll.keys()
            opts.class_namespaces = ClassList.namespaces_from_classnames(opts.library_classes)
            # some sensible settings (deviating from defaultOptions)
            opts.ignore_no_loop_block = True
            opts.ignore_reference_fields = True
            opts.ignore_undeclared_privates = True
            opts.ignore_unused_variables = True
            # override from config
            jobConf = Context.jobconf
            for option, value in jobConf.get("lint-check", {}).items():
                setattr(opts, option.replace("-","_"), value)
            lint.lint_check(tree, self.id, opts)

        # analyze tree
        treeDeps  = []  # will be filled by _analyzeClassDepsNode
        self._analyzeClassDepsNode(tree, treeDeps, inLoadContext=True)

        # Process source tree data
        for dep in treeDeps:
            if dep.isLoadDep:
                if "auto-require" not in metaIgnore:
                    item = dep.name
                    if item in metaIgnore:
                        pass
                    elif item in metaLoad:
                        console.warn("%s: #require(%s) is auto-detected" % (self.id, item))
                    else:
                        # adding all items to list (the second might have needsRecursion)
                        load.append(dep)
            else: # runDep
                if "auto-use" not in metaIgnore:
                    item = dep.name
                    if item in metaIgnore:
                        pass
                    #elif item in (x.name for x in load):
                    #    pass
                    elif item in metaRun:
                        console.warn("%s: #use(%s) is auto-detected" % (self.id, item))
                    else:
                        # adding all items to list (to comply with the 'load' deps)
                        run.append(dep)

        console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
        }

        return deps


    # Expand load deps marked needsRecursion through getTransitiveDeps;
    # every transitively found dep becomes a load dependency.
    def buildTransitiveDeps(shallowDeps):
        newLoad = set(shallowDeps['load'])
        classMaps = {}
        for dep in shallowDeps['load']:
            if dep.needsRecursion:
                recDeps = self.getTransitiveDeps(dep, variantSet, classMaps, force=force)  # need variantSet here (not relevantVariants), as the recursive deps might depend on any of those
                for recdep in recDeps:
                    recdep.isLoadDep = True  # all these become load dependencies
                newLoad.update(recDeps)
        shallowDeps['load'] = list(newLoad)
        return shallowDeps


    ##
    # Check wether load dependencies are fresh which are included following
    # a depsItem.needsRecursion of the current class
    def transitiveDepsAreFresh(depsStruct, cacheModTime):
        result = True
        if cacheModTime is None:  # TODO: this can currently only occur with a Cache.memcache result
            result = False
        else:
            for dep in depsStruct["load"]:
                if dep.requestor != self.id:  # this was included through a recursive traversal
                    if dep.name in ClassesAll:
                        classObj = ClassesAll[dep.name]
                        if cacheModTime < classObj.m_time():
                        #if cacheModTime < classObj.library.mostRecentlyChangedFile()[1]:
                            console.debug("Invalidating dep cache for %s, as %s is newer" % (self.id, classObj.id))
                            result = False
                            break
                            # checking classObj.m_time() was done a lot, and was a major time consumer,
                            # esp. when building demobrowser; just checking a library's youngest entry is
                            # much faster, as it is only calculated once (when called without (force=True));
                            # the downside is that a change of one class in a library will result in cache
                            # invalidation for *all* classes in this lib; that's the trade-off;
                            # i'd love to just check the libs directly ("for lib in script.libraries:
                            # if cacheModTime < lib.mostRecentlyChangedFile()[1]:..."), but I don't
                            # have access to the script here in Class.
        return result

    # -- Main ---------------------------------------------------------
    # handles cache and invokes worker function

    console = self.context['console']

    classVariants    = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variantSet)
    cacheId          = "deps-%s-%s" % (self.path, util.toString(relevantVariants))
    cached           = True

    # try compile cache
    classInfo, classInfoMTime = self._getClassCache()
    (deps, cacheModTime) = classInfo[cacheId] if cacheId in classInfo else (None,None)

    # try dependencies.json
    if (True  # just a switch
        and deps == None  # TODO: temp. hack to work around issue with 'statics' optimization and dependencies.json
        and 'statics' not in Context.jobconf.get("compile-options/code/optimize",[])
       ):
        deps_json, cacheModTime = self.library.getDependencies(self.id)
        if deps_json is not None:
            #console.info("using dependencies.json for: %s" % self.id)
            deps = self.depsItems_from_Json(deps_json)
            # don't cache at all, so later 'statics' optimized jobs don't
            # pick up the short depsList from cache

    if (deps == None
        or force == True
        or not transitiveDepsAreFresh(deps, cacheModTime)):
        cached = False
        deps = buildShallowDeps(tree)
        deps = buildTransitiveDeps(deps)
        if not tree:  # don't cache for a passed-in tree
            classInfo[cacheId] = (deps, time.time())
            self._writeClassCache(classInfo)

    return deps, cached
def getTransitiveDeps(self, depsItem, variants, classMaps, checkSet=None, force=False):
    """
    Return the set of DependencyItems reachable from <depsItem>
    (a class#feature reference), recursing into the referenced method's
    own dependencies. Results are memoized in the memory cache under a
    "methoddeps-..." key unless the deps were pruned (function value
    passed as a reference, not called).

    :param depsItem: DependencyItem to start from
    :param variants: variant map (only stringified into the cache key here)
    :param classMaps: accumulator for findClassForFeature lookups
    :param checkSet: currently unused accumulator (kept for API stability)
    :param force: bypass the method-deps cache
    """
    ##
    # find dependencies of a method <methodId> that has been referenced from
    # <classId>. recurse on the immediate dependencies in the method code.
    #
    # @param deps accumulator variable set((c1,m1), (c2,m2),...)
    def getTransitiveDepsR(dependencyItem, variantString, totalDeps):

        # We don't add the in-param to the global result
        classId  = dependencyItem.name
        methodId = dependencyItem.attribute
        function_pruned = False

        cacheId = "methoddeps-%r-%r-%r" % (classId, methodId, variantString)
        # The bad thing here is that 'variantString' contains environment setting
        # that normally have no influence on the dependencies (like
        # 'qx.Application'). So cached deps are rejected for no reason (ex.
        # building the demos of Demobrowser). But I cannot easily apply
        # variant-projection here, as it only proves that the current class is
        # independent of a specific environement key; but its recursive deps could
        # well be. Fix: Get the shallow deps of the current method from cache, and then get the
        # trans. deps of those items. They then could appy the same reasoning.
        if not force:
            # Check cache
            cachedDeps, _ = cache.read(cacheId)  # no use to put this into a file, due to transitive dependencies to other files
            if cachedDeps != None:
                console.debug("using cached result")
                #print "\nusing cached result for", classId, methodId
                return cachedDeps

        # Need to calculate deps
        console.dot("_")

        # Check known class
        if classId not in self._classesObj:
            console.debug("Skipping unknown class of dependency: %s#%s (%s:%d)" % (classId, methodId,
                          dependencyItem.requestor, dependencyItem.line))
            return set()

        # Check other class
        elif classId != self.id:
            classObj = self._classesObj[classId]
            otherdeps = classObj.getTransitiveDeps(dependencyItem, variants, classMaps, totalDeps, force)
            return otherdeps

        # Check own hierarchy
        defClassId, attribNode = self.findClassForFeature(methodId, variants, classMaps)

        # lookup error
        if not defClassId or defClassId not in self._classesObj:
            console.debug("Skipping unknown definition of dependency: %s#%s (%s:%d)" % (classId, methodId,
                          dependencyItem.requestor, dependencyItem.line))
            return set()

        defDepsItem = DependencyItem(defClassId, methodId, classId)
        if dependencyItem.isCall:
            defDepsItem.isCall = True  # if the dep is an inherited method being called, pursue the parent method as call

        localDeps = set()

        # inherited feature
        if defClassId != classId:
            self.resultAdd(defDepsItem, localDeps)
            defClass = self._classesObj[defClassId]
            otherdeps = defClass.getTransitiveDeps(defDepsItem, variants, classMaps, totalDeps, force)
            localDeps.update(otherdeps)
            return localDeps

        # Process own deps
        console.debug("%s#%s dependencies:" % (classId, methodId))
        console.indent()

        if isinstance(attribNode, Node):

            if (attribNode.getChild("function", False)  # is it a function(){..} value?
                and not dependencyItem.isCall           # and the reference was no call
               ):
                function_pruned = True
                pass  # don't lift those deps
            else:
                # Get the method's immediate deps
                # TODO: is this the right API?!
                depslist = []
                self._analyzeClassDepsNode(attribNode, depslist, inLoadContext=False)
                console.debug("shallow dependencies: %r" % (depslist,))

                # This depends on attribNode belonging to current class
                my_ignores = self.getHints("ignoreDeps") + self.getHints("optionalDeps")
                my_ignores = map(MetaIgnore, my_ignores)

                for depsItem in depslist:
                    if depsItem in totalDeps:
                        continue
                    if depsItem.name in my_ignores:
                        continue
                    if self.resultAdd(depsItem, localDeps):
                        # Recurse dependencies
                        # NOTE(review): passes the <variants> map where the
                        # parameter is named variantString; it is only %r-ed
                        # into the cache key, so this still works — confirm intended
                        downstreamDeps = getTransitiveDepsR(depsItem, variants, totalDeps.union(localDeps))
                        localDeps.update(downstreamDeps)

        # Cache update
        # --- i cannot cache currently, if the deps of a function are pruned
        #     when the function is passed as a ref, rather than called (s. above
        #     around 'attribNode.getChild("function",...)')
        if not function_pruned:
            cache.write(cacheId, localDeps, memory=True, writeToFile=False)

        console.outdent()
        return localDeps

    # -- getTransitiveDeps -------------------------------------------------

    cache = self.context['cache']
    console = self.context['console']
    checkset = checkSet or set()
    variantString = util.toString(variants)
    deps = getTransitiveDepsR(depsItem, variantString, checkset)  # checkset is currently not used, leaving it for now

    return deps
def dependencies(self, variantSet, force=False):
    """
    Return (deps, cached) for this class: deps is a dict with "load",
    "run", "ignore" lists of DependencyItem; cached is True when the
    result was taken from the class cache.

    :param variantSet: variant map used to project relevant variants
    :param force: recompute even if a fresh cache entry exists
    """
    ##
    # Get deps from meta info and class code, and sort them into
    # load/run/ignore deps.
    #
    # Note:
    #   load time = before class = require
    #   run time  = after class  = use
    def buildShallowDeps():

        load   = []
        run    = []
        ignore = [DependencyItem(x, '', "|DefaultIgnoredNamesDynamic|") for x in self.defaultIgnoredNamesDynamic]

        console.debug("Analyzing tree: %s" % self.id)
        console.indent()

        # Read meta data
        meta         = self.getHints()
        metaLoad     = meta.get("loadtimeDeps", [])
        metaRun      = meta.get("runtimeDeps" , [])
        metaOptional = meta.get("optionalDeps", [])
        metaIgnore   = meta.get("ignoreDeps"  , [])
        # optional deps are treated like ignored ones from here on
        metaIgnore.extend(metaOptional)

        # regexify globs in metaignore
        metaIgnore = map(MetaIgnore, metaIgnore)

        # Turn strings into DependencyItems()
        for target,metaHint in ((load,metaLoad), (run,metaRun), (ignore,metaIgnore)):
            for key in metaHint:
                # add all feature checks if requested
                if key == "feature-checks" and metaHint in (metaLoad, metaRun):
                    target.extend(self.getAllEnvChecks(-1, metaHint==metaLoad))
                # turn an entry into a DependencyItem
                elif isinstance(key, types.StringTypes):
                    sig = key.split('#',1)
                    className = sig[0]
                    attrName  = sig[1] if len(sig)>1 else ''
                    target.append(DependencyItem(className, attrName, self.id, "|hints|"))

        # Read source tree data
        treeDeps = []  # will be filled by _analyzeClassDepsNode
        self._analyzeClassDepsNode(self.tree(), treeDeps, inLoadContext=True)

        # Process source tree data
        for dep in treeDeps:
            if dep.isLoadDep:
                if "auto-require" not in metaIgnore:
                    item = dep.name
                    if item in metaIgnore:
                        pass
                    elif item in metaLoad:
                        console.warn("%s: #require(%s) is auto-detected" % (self.id, item))
                    else:
                        # adding all items to list (the second might have needsRecursion)
                        load.append(dep)
            else: # runDep
                if "auto-use" not in metaIgnore:
                    item = dep.name
                    if item in metaIgnore:
                        pass
                    #elif item in (x.name for x in load):
                    #    pass
                    elif item in metaRun:
                        console.warn("%s: #use(%s) is auto-detected" % (self.id, item))
                    else:
                        # adding all items to list (to comply with the 'load' deps)
                        run.append(dep)

        console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
        }

        return deps


    # Expand load deps marked needsRecursion through getTransitiveDeps.
    def buildTransitiveDeps(shallowDeps):
        newLoad = set(shallowDeps['load'])
        classMaps = {}
        for dep in shallowDeps['load']:
            if dep.needsRecursion:
                recDeps = self.getTransitiveDeps(dep, variantSet, classMaps, force=force)  # need variantSet here (not relevantVariants), as the recursive deps might depend on any of those
                newLoad.update(recDeps)
        shallowDeps['load'] = list(newLoad)
        return shallowDeps


    ##
    # Check wether load dependencies are fresh which are included following
    # a depsItem.needsRecursion of the current class
    def transitiveDepsAreFresh(depsStruct, cacheModTime):
        result = True
        if cacheModTime is None:  # TODO: this can currently only occur with a Cache.memcache result
            result = False
        else:
            for dep in depsStruct["load"]:
                if dep.requestor != self.id:  # this was included through a recursive traversal
                    if dep.name in self._classesObj:
                        classObj = self._classesObj[dep.name]
                        if cacheModTime < classObj.m_time():
                        #if cacheModTime < classObj.library.mostRecentlyChangedFile()[1]:
                            console.debug("Invalidating dep cache for %s, as %s is newer" % (self.id, classObj.id))
                            result = False
                            break
                            # checking classObj.m_time() was done a lot, and was a major time consumer,
                            # esp. when building demobrowser; just checking a library's youngest entry is
                            # much faster, as it is only calculated once (when called without (force=True));
                            # the downside is that a change of one class in a library will result in cache
                            # invalidation for *all* classes in this lib; that's the trade-off;
                            # i'd love to just check the libs directly ("for lib in script.libraries:
                            # if cacheModTime < lib.mostRecentlyChangedFile()[1]:..."), but I don't
                            # have access to the script here in Class.
        return result

    # -- Main ---------------------------------------------------------
    # handles cache and invokes worker function

    console = self.context['console']

    classVariants    = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variantSet)
    cacheId          = "deps-%s-%s" % (self.path, util.toString(relevantVariants))
    cached           = True

    classInfo, classInfoMTime = self._getClassCache()
    (deps, cacheModTime) = classInfo[cacheId] if cacheId in classInfo else (None,None)
    if (deps == None
        or force == True
        or not transitiveDepsAreFresh(deps, cacheModTime)):
        cached = False
        deps = buildShallowDeps()
        deps = buildTransitiveDeps(deps)
        #if self.id != "qx.core.Environment":
        #    # Mustn't cache q.c.Env deps across runs, as they depend on the entire
        #    # class list
        if True:
            classInfo[cacheId] = (deps, time.time())
            self._writeClassCache(classInfo)

    return deps, cached
def classlistFromClassRecursive(depsItem, excludeWithDeps, variants, result, warn_deps, loadDepsChain, allowBlockLoaddeps=True):
    """
    Depth-first expansion of <depsItem> into <result>: recurse load deps
    first, then append the item, then recurse run deps — yielding a
    load-order-sorted class list in one pass. Detects load-dep cycles via
    <loadDepsChain>.

    NOTE(review): nested function — relies on closure variables of the
    enclosing method (resultNames, firstTime, buildType, genProxyIter,
    logInfos, verifyDeps, self) which are not visible here; confirm against
    the enclosing scope.
    """
    # support blocking
    if depsItem.name in excludeWithDeps:
        if depsItem.isLoadDep and not allowBlockLoaddeps:
            # blocking a load-time dep is an error, surfaced by the caller
            raise DependencyError()
        return

    # check if already in
    if depsItem.name in resultNames:  # string compares are perceivably faster than object compares (as DependencyItem defines __eq__)
        return

    # Handle qx.core.Environment
    if depsItem.name == "qx.core.Environment" and firstTime[0]:
        envObj = self._classesObj["qx.core.Environment"]
        envTreeId = "tree-%s-%s" % (envObj.path, util.toString({}))  # TODO: {} is a temp. hack
        self._cache.remove(envTreeId)  # clear pot. memcache, so already (string) optimized tree is not optimized again (e.g. with Demobrowser)
        firstTime[0] = False

    # reading dependencies
    self._console.debug("Gathering dependencies: %s" % depsItem.name)
    self._console.indent()
    deps, cached = self.getCombinedDeps(depsItem.name, variants, buildType, genProxy=genProxyIter.next())
    self._console.outdent()
    if logInfos: self._console.dot("%s" % "." if cached else "*")

    # and evaluate them
    deps["warn"] = self._checkDepsAreKnown(deps)  # add 'warn' key to deps
    ignore_names = [x.name for x in deps["ignore"]]
    if verifyDeps:
        for dep in deps["warn"]:
            if dep.name not in ignore_names:
                warn_deps.append(dep)

    # process lists
    try:

        skipNames = [x.name for x in deps["warn"] + deps["ignore"]]

        # cycle detection
        assert depsItem.name not in loadDepsChain
        loadDepsChain.append(depsItem.name)

        for subitem in deps["load"]:
            # cycle check
            if subitem.name in loadDepsChain:
                self._console.warn("Detected circular dependency between: %s and %s" % (depsItem.name, subitem.name))
                self._console.indent()
                self._console.debug("currently explored dependency path: %r" % loadDepsChain)
                self._console.outdent()
                raise RuntimeError("Circular class dependencies")
            if subitem.name not in resultNames and subitem.name not in skipNames:
                classlistFromClassRecursive(subitem, excludeWithDeps, variants, result, warn_deps, loadDepsChain, allowBlockLoaddeps)

        ##
        # putting this here allows sorting and expanding of the class
        # list in one go!
        if depsItem.name not in resultNames:
            result.append(depsItem)
            resultNames.append(depsItem.name)

        # cycle check
        loadDepsChain.remove(depsItem.name)

        for subitem in deps["run"]:
            # run deps start a fresh (empty) loadDepsChain — they cannot form load cycles
            if subitem.name not in resultNames and subitem.name not in skipNames:
                classlistFromClassRecursive(subitem, excludeWithDeps, variants, result, warn_deps, [], allowBlockLoaddeps)

    except DependencyError, detail:
        raise ValueError("Attempt to block load-time dependency of class %s to %s" % (depsItem.name, subitem.name))
# calculate class list recursively for item in includeWithDeps: depsItem = DependencyItem(item, '', '|config|') # calculate dependencies and add required classes classlistFromClassRecursive(depsItem, excludeWithDeps, variants, result, warn_deps, [], allowBlockLoaddeps) #classlistFromClassIterative(depsItem, excludeWithDeps, variants, result, warn_deps, [], allowBlockLoaddeps) self._console.dotclear() #print len(result)," ", # process qx.core.Environment if ("qx.core.Environment" in resultNames and "variants" in script.optimize and not processedEnvironment): envObj = self._classesObj["qx.core.Environment"] envTreeId = "tree-%s-%s" % (envObj.path, util.toString({})) # TODO: {} is a temp. hack compOpts = CompileOptions(optimize=[], variants=variants) compOpts.allClassVariants = script.classVariants([self._classesObj[x] for x in resultNames]) tree = Class.optimizeEnvironmentClass(envObj, compOpts) self._cache.write(envTreeId, tree, memory=True, writeToFile=False) # this is for the side-effect of leaving a modified tree for qx.core.Environmet # in the cache! _ = envObj.dependencies(variants, force=True) # this is for the side-effect of re-calculating the transitive dependencies # of qx.core.Environment! processedEnvironment = True else: # We currently know that one additional iteration is enough, # after optimizeEnvironmentClass() has run. This has to do # with the fact that it only removes dependencies to # qx.bom.client.* classes, which in turn do not use
def dependencies(self, variantSet, force=False, tree=None):
    """
    Return (deps, cached) for this class using compiler-hint ("@require/
    @use") processing: deps is a dict with "load", "run", "ignore" lists of
    DependencyItem; cached is True when the result came from the class
    cache or dependencies.json.

    :param variantSet: variant map; non-empty triggers "variants" optimization
    :param force: recompute even if a fresh cache entry exists
    :param tree: optional pre-built syntax tree; results for a passed-in
        tree are NOT cached
    """
    ##
    # Get deps from meta info and class code, and sort them into
    # load/run/ignore deps.
    #
    # Note:
    #   load time = before class = require
    #   run time  = after class  = use
    def buildShallowDeps(tree=None):

        load, run = [], []
        ignore = [DependencyItem(x, '', "|DefaultIgnoredNamesDynamic|") for x in self.defaultIgnoredNamesDynamic]

        console.debug("Getting shallow deps of: %s" % self.id)
        console.indent()

        # Read source tree data
        if not tree:
            if variantSet:  # a filled variantSet map means that "variants" optimization is wanted
                tree = self.optimize(None, ["variants"], variantSet)
            else:
                tree = self.tree()

        # Get deps from compiler hints
        if not hasattr(tree, 'hint'):
            tree = jshints.create_hints_tree(tree)  # this will be used by some of the subsequent method calls
        load_hints, run_hints, ignore_hints, all_feature_checks = self.dependencies_from_comphints(tree)  # ignore_hints=[HintArgument]
        load.extend(load_hints)
        run.extend(run_hints)
        load_feature_checks = all_feature_checks[0]
        run_feature_checks = all_feature_checks[1]

        # Analyze tree
        treeDeps = []  # will be filled by _analyzeClassDepsNode
        self._analyzeClassDepsNode(tree, treeDeps, inLoadContext=True)

        # Filter lexical deps through ignore_hints
        load1, run1, ignore1 = self.filter_symbols_by_comphints(treeDeps, ignore_hints)
        # load and run are being filtered, ignore contains the actually filtered depsItems

        # integrate into existing lists
        # warn when an auto-detected dep duplicates an explicit hint
        load_hint_names = [str(x) for x in load_hints]
        for dep in load1:
            if str(dep) in load_hint_names and not load_feature_checks:
                console.warn("%s: @require(%s) is auto-detected" % (self.id, dep))
            load.append(dep)
        run_hint_names = [str(x) for x in run_hints]
        for dep in run1:
            if str(dep) in run_hint_names and not run_feature_checks:
                console.warn("%s: @use(%s) is auto-detected" % (self.id, dep))
            run.append(dep)
        ignore.extend(ignore1)

        console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
        }

        return deps


    # Expand load deps marked needsRecursion through getTransitiveDeps;
    # every transitively found dep becomes a load dependency.
    def buildTransitiveDeps(shallowDeps):
        newLoad = set(shallowDeps['load'])
        classMaps = {}
        for dep in shallowDeps['load']:
            if dep.needsRecursion:
                recDeps = self.getTransitiveDeps(dep, variantSet, classMaps, force=force)  # need variantSet here (not relevantVariants), as the recursive deps might depend on any of those
                for recdep in recDeps:
                    recdep.isLoadDep = True  # all these become load dependencies
                newLoad.update(recDeps)
        shallowDeps['load'] = list(newLoad)
        return shallowDeps


    ##
    # Check wether load dependencies are fresh which are included following
    # a depsItem.needsRecursion of the current class
    def transitiveDepsAreFresh(depsStruct, cacheModTime):
        result = True
        if cacheModTime is None:  # TODO: this can currently only occur with a Cache.memcache result
            result = False
        else:
            for dep in depsStruct["load"]:
                if dep.requestor != self.id:  # this was included through a recursive traversal
                    if dep.name in ClassesAll:
                        classObj = ClassesAll[dep.name]
                        if cacheModTime < classObj.m_time():
                        #if cacheModTime < classObj.library.mostRecentlyChangedFile()[1]:
                            console.debug("Invalidating dep cache for %s, as %s is newer" % (self.id, classObj.id))
                            result = False
                            break
                            # checking classObj.m_time() was done a lot, and was a major time consumer,
                            # esp. when building demobrowser; just checking a library's youngest entry is
                            # much faster, as it is only calculated once (when called without (force=True));
                            # the downside is that a change of one class in a library will result in cache
                            # invalidation for *all* classes in this lib; that's the trade-off;
                            # i'd love to just check the libs directly ("for lib in script.libraries:
                            # if cacheModTime < lib.mostRecentlyChangedFile()[1]:..."), but I don't
                            # have access to the script here in Class.
        return result

    # -- Main ---------------------------------------------------------
    # handles cache and invokes worker function

    console = self.context['console']

    classVariants    = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variantSet)
    cacheId          = "deps-%s-%s" % (self.path, util.toString(relevantVariants))
    cached           = True

    # try compile cache
    classInfo, classInfoMTime = self._getClassCache()
    (deps, cacheModTime) = classInfo[cacheId] if cacheId in classInfo else (None,None)

    # try dependencies.json
    if (True  # just a switch
        and deps == None  # TODO: temp. hack to work around issue with 'statics' optimization and dependencies.json
        and 'statics' not in Context.jobconf.get("compile-options/code/optimize",[])
       ):
        deps_json, cacheModTime = self.library.getDependencies(self.id)
        if deps_json is not None:
            #console.info("using dependencies.json for: %s" % self.id)
            deps = self.depsItems_from_Json(deps_json)
            # don't cache at all, so later 'statics' optimized jobs don't
            # pick up the short depsList from cache

    if (deps == None
        or force == True
        or not transitiveDepsAreFresh(deps, cacheModTime)):
        cached = False
        deps = buildShallowDeps(tree)
        deps = buildTransitiveDeps(deps)
        if not tree:  # don't cache for a passed-in tree
            classInfo[cacheId] = (deps, time.time())
            self._writeClassCache(classInfo)

    return deps, cached
def dependencies(self, variantSet):
    """
    Return the dependency structure of this class under <variantSet>.

    Result is a dict with keys "load", "run", "ignore" (lists of
    DependencyItem) and 'undef' (unresolved names found in the class code).
    Results are cached, keyed by self.path and the variant projection
    relevant to this class.

    Fixes over the previous revision: the body referenced several names
    that were never bound in this scope (``fileId``, ``clazz``,
    ``console``, ``cache``, bare ``projectClassVariantsToCurrent``) and
    would raise NameError; they are now taken from ``self`` /
    ``self.context``, matching the sibling methods in this file.
    """
    ##
    # get deps from meta info and class code
    def buildShallowDeps():

        ##
        # get deps from class code (tree)
        def analyzeClassTree(variantSet):
            loadtimeDeps = []
            runtimeDeps = []
            undefDeps = []
            # accumulator lists are filled in-place by _analyzeClassDepsNode
            tree = self.tree(variantSet)
            self._analyzeClassDepsNode(tree, loadtimeDeps, runtimeDeps, undefDeps, False, variantSet)
            return loadtimeDeps, runtimeDeps, undefDeps

        # ------------------------------------------------------
        # Notes:
        # load time = before class = require
        # runtime = after class = use
        load = []
        run = []
        # NOTE(review): assumes self._defaultIgnore exists on this class —
        # other revisions use self.defaultIgnoredNamesDynamic; confirm
        ignore = [DependencyItem(x,-1) for x in self._defaultIgnore]
        console.debug("Gathering dependencies: %s" % self.id)
        console.indent()

        # Read meta data
        meta = self.getMeta(self.id)
        metaLoad = meta.get("loadtimeDeps", [])
        metaRun = meta.get("runtimeDeps" , [])
        metaOptional = meta.get("optionalDeps", [])
        metaIgnore = meta.get("ignoreDeps" , [])

        # Process meta data (explicit hints become DependencyItems up front)
        load.extend(DependencyItem(x,-1) for x in metaLoad)
        run.extend(DependencyItem(x,-1) for x in metaRun)
        ignore.extend(DependencyItem(x,-1) for x in metaIgnore)

        # Read content data
        (autoLoad, autoRun, autoWarn) = analyzeClassTree(variantSet)

        # Process content data
        # auto-detected deps already covered by explicit hints only produce
        # a warning; optional deps are dropped silently
        if not "auto-require" in metaIgnore:
            for dep in autoLoad:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in (x.name for x in load):
                    console.warn("%s: #require(%s) is auto-detected" % (self.id, item))
                else:
                    load.append(dep)

        if not "auto-use" in metaIgnore:
            for dep in autoRun:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in (x.name for x in load):
                    pass  # already a load dep; no need to list it as run dep too
                elif item in (x.name for x in run):
                    console.warn("%s: #use(%s) is auto-detected" % (self.id, item))
                else:
                    run.append(dep)

        console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
            'undef'  : autoWarn
        }

        return deps

    # -- Main ---------------------------------------------------------
    # handles cache and invokes worker function

    console = self.context['console']
    cache   = self.context['cache']

    classVariants    = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variantSet)
    cacheId          = "deps-%s-%s" % (self.path, util.toString(relevantVariants))

    deps = cache.readmulti(cacheId, self.path)
    if deps == None:
        deps = buildShallowDeps()
        cache.writemulti(cacheId, deps)

    return deps
def dependencies(self, variantSet, force=False, tree=None):
    """
    Return (deps, cached) for this class: deps is a dict with "load",
    "run", "ignore" lists of DependencyItem; cached is True when the
    result came from the class cache or dependencies.json. Also runs a
    parse-level lint check against the global class list (ClassesAll).

    :param variantSet: variant map; non-empty triggers "variants" optimization
    :param force: recompute even if a fresh cache entry exists
    :param tree: optional pre-built syntax tree; results for a passed-in
        tree are NOT cached
    """
    ##
    # Get deps from meta info and class code, and sort them into
    # load/run/ignore deps.
    #
    # Note:
    #   load time = before class = require
    #   run time  = after class  = use
    def buildShallowDeps(tree=None):

        load = []
        run = []
        ignore = [
            DependencyItem(x, '', "|DefaultIgnoredNamesDynamic|")
            for x in self.defaultIgnoredNamesDynamic
        ]

        console.debug("Analyzing tree: %s" % self.id)
        console.indent()

        # Read meta data
        meta = self.getHints()
        metaLoad = meta.get("loadtimeDeps", [])
        metaRun = meta.get("runtimeDeps", [])
        metaOptional = meta.get("optionalDeps", [])
        metaIgnore = meta.get("ignoreDeps", [])
        # optional deps are treated like ignored ones from here on
        metaIgnore.extend(metaOptional)

        # regexify globs in metaignore
        metaIgnore = map(MetaIgnore, metaIgnore)

        # Turn strings into DependencyItems()
        for target, metaHint in ((load, metaLoad), (run, metaRun), (ignore, metaIgnore)):
            for key in metaHint:
                # add all feature checks if requested
                if key == "feature-checks" and metaHint in (metaLoad, metaRun):
                    target.extend(
                        self.getAllEnvChecks(-1, metaHint == metaLoad))
                # turn an entry into a DependencyItem
                elif isinstance(key, types.StringTypes):
                    sig = key.split('#', 1)
                    className = sig[0]
                    attrName = sig[1] if len(sig) > 1 else ''
                    target.append(
                        DependencyItem(className, attrName, self.id, "|hints|"))

        # Read source tree data
        if not tree:
            if variantSet:  # a filled variantSet map means that "variants" optimization is wanted
                tree = self.optimize(None, ["variants"], variantSet)
            else:
                tree = self.tree()

        # do lint checking here, as we have a classList ("ClassesAll") to check globals against
        if True:
            # construct parse-level lint options
            opts = lint.defaultOptions()
            opts.library_classes = ClassesAll.keys()
            opts.class_namespaces = ClassList.namespaces_from_classnames(
                opts.library_classes)
            # some sensible settings (deviating from defaultOptions)
            opts.ignore_no_loop_block = True
            opts.ignore_reference_fields = True
            opts.ignore_undeclared_privates = True
            opts.ignore_unused_variables = True
            # override from config
            jobConf = Context.jobconf
            for option, value in jobConf.get("lint-check", {}).items():
                setattr(opts, option.replace("-", "_"), value)
            lint.lint_check(tree, self.id, opts)

        # analyze tree
        treeDeps = []  # will be filled by _analyzeClassDepsNode
        self._analyzeClassDepsNode(tree, treeDeps, inLoadContext=True)

        # Process source tree data
        for dep in treeDeps:
            if dep.isLoadDep:
                if "auto-require" not in metaIgnore:
                    item = dep.name
                    if item in metaIgnore:
                        pass
                    elif item in metaLoad:
                        console.warn("%s: #require(%s) is auto-detected" % (self.id, item))
                    else:
                        # adding all items to list (the second might have needsRecursion)
                        load.append(dep)
            else:  # runDep
                if "auto-use" not in metaIgnore:
                    item = dep.name
                    if item in metaIgnore:
                        pass
                    #elif item in (x.name for x in load):
                    #    pass
                    elif item in metaRun:
                        console.warn("%s: #use(%s) is auto-detected" % (self.id, item))
                    else:
                        # adding all items to list (to comply with the 'load' deps)
                        run.append(dep)

        console.outdent()

        # Build data structure
        deps = {
            "load": load,
            "run": run,
            "ignore": ignore,
        }

        return deps

    # Expand load deps marked needsRecursion through getTransitiveDeps;
    # every transitively found dep becomes a load dependency.
    def buildTransitiveDeps(shallowDeps):
        newLoad = set(shallowDeps['load'])
        classMaps = {}
        for dep in shallowDeps['load']:
            if dep.needsRecursion:
                recDeps = self.getTransitiveDeps(
                    dep, variantSet, classMaps, force=force
                )  # need variantSet here (not relevantVariants), as the recursive deps might depend on any of those
                for recdep in recDeps:
                    recdep.isLoadDep = True  # all these become load dependencies
                newLoad.update(recDeps)
        shallowDeps['load'] = list(newLoad)
        return shallowDeps

    ##
    # Check wether load dependencies are fresh which are included following
    # a depsItem.needsRecursion of the current class
    def transitiveDepsAreFresh(depsStruct, cacheModTime):
        result = True
        if cacheModTime is None:  # TODO: this can currently only occur with a Cache.memcache result
            result = False
        else:
            for dep in depsStruct["load"]:
                if dep.requestor != self.id:  # this was included through a recursive traversal
                    if dep.name in ClassesAll:
                        classObj = ClassesAll[dep.name]
                        if cacheModTime < classObj.m_time():
                        #if cacheModTime < classObj.library.mostRecentlyChangedFile()[1]:
                            console.debug(
                                "Invalidating dep cache for %s, as %s is newer" %
                                (self.id, classObj.id))
                            result = False
                            break
                            # checking classObj.m_time() was done a lot, and was a major time consumer,
                            # esp. when building demobrowser; just checking a library's youngest entry is
                            # much faster, as it is only calculated once (when called without (force=True));
                            # the downside is that a change of one class in a library will result in cache
                            # invalidation for *all* classes in this lib; that's the trade-off;
                            # i'd love to just check the libs directly ("for lib in script.libraries:
                            # if cacheModTime < lib.mostRecentlyChangedFile()[1]:..."), but I don't
                            # have access to the script here in Class.
        return result

    # -- Main ---------------------------------------------------------
    # handles cache and invokes worker function

    console = self.context['console']

    classVariants = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(
        classVariants, variantSet)
    cacheId = "deps-%s-%s" % (self.path, util.toString(relevantVariants))
    cached = True

    # try compile cache
    classInfo, classInfoMTime = self._getClassCache()
    (deps, cacheModTime) = classInfo[cacheId] if cacheId in classInfo else (None, None)

    # try dependencies.json
    if (True  # just a switch
            and deps == None  # TODO: temp. hack to work around issue with 'statics' optimization and dependencies.json
            and 'statics' not in Context.jobconf.get(
                "compile-options/code/optimize", [])):
        deps_json, cacheModTime = self.library.getDependencies(self.id)
        if deps_json is not None:
            #console.info("using dependencies.json for: %s" % self.id)
            deps = self.depsItems_from_Json(deps_json)
            # don't cache at all, so later 'statics' optimized jobs don't
            # pick up the short depsList from cache

    if (deps == None
            or force == True
            or not transitiveDepsAreFresh(deps, cacheModTime)):
        cached = False
        deps = buildShallowDeps(tree)
        deps = buildTransitiveDeps(deps)
        if not tree:  # don't cache for a passed-in tree
            classInfo[cacheId] = (deps, time.time())
            self._writeClassCache(classInfo)

    return deps, cached
def dependencies(self, variantSet, force=False, tree=None):
    ##
    # Get deps from meta info and class code, and sort them into
    # load/run/ignore deps.
    #
    # Note:
    #   load time = before class = require
    #   run time  = after class  = use
    #
    # @param variantSet variant bindings for this job; a non-empty map triggers
    #                   the "variants" optimization before analysis
    # @param force      bypass cache freshness checks and recompute
    # @param tree       optional pre-built syntax tree; when given, results are
    #                   NOT written to the class cache
    # @return (deps, cached) where deps is {"load": [...], "run": [...],
    #         "ignore": [...]} of DependencyItem, and cached tells whether the
    #         result came from cache/dependencies.json

    ##
    # Compute the direct (non-transitive) deps of this class from compiler
    # hints and lexical analysis of the syntax tree.
    def buildShallowDeps(tree=None):

        load, run = [], []
        ignore = [DependencyItem(x, '', "|DefaultIgnoredNamesDynamic|") for x in self.defaultIgnoredNamesDynamic]

        console.debug("Getting shallow deps of: %s" % self.id)
        console.indent()

        # Read source tree data
        if not tree:
            if variantSet:  # a filled variantSet map means that "variants" optimization is wanted
                tree = self.optimize(None, ["variants"], variantSet)
            else:
                tree = self.tree()

        # Get deps from compiler hints
        if not hasattr(tree, 'hint'):
            tree = jshints.create_hints_tree(tree)  # this will be used by some of the subsequent method calls
        load_hints, run_hints, ignore_hints, all_feature_checks = self.dependencies_from_comphints(tree)  # ignore_hints=[HintArgument]
        load.extend(load_hints)
        run.extend(run_hints)
        load_feature_checks = all_feature_checks[0]
        run_feature_checks = all_feature_checks[1]

        # Analyze tree
        treeDeps = []  # will be filled by _analyzeClassDepsNode
        self._analyzeClassDepsNode(tree, treeDeps, inLoadContext=True)

        # Filter lexical deps through ignore_hints
        # (load and run are being filtered, ignore contains the actually
        # filtered depsItems)
        load1, run1, ignore1 = self.filter_symbols_by_comphints(treeDeps, ignore_hints)

        # integrate into existing lists; warn about @require/@use hints that
        # the analysis would have detected anyway (unless feature checks are
        # involved, where the explicit hint is still meaningful)
        load_hint_names = [str(x) for x in load_hints]
        for dep in load1:
            if str(dep) in load_hint_names and not load_feature_checks:
                console.warn("%s: @require(%s) is auto-detected" % (self.id, dep))
            load.append(dep)
        run_hint_names = [str(x) for x in run_hints]
        for dep in run1:
            if str(dep) in run_hint_names and not run_feature_checks:
                console.warn("%s: @use(%s) is auto-detected" % (self.id, dep))
            run.append(dep)
        ignore.extend(ignore1)

        console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
        }

        return deps

    ##
    # Expand recursion-marked load deps into their transitive closure; all
    # recursively found items become load dependencies.
    def buildTransitiveDeps(shallowDeps):
        newLoad = set(shallowDeps['load'])
        classMaps = {}
        for dep in shallowDeps['load']:
            if dep.needsRecursion:
                # need variantSet here (not relevantVariants), as the recursive
                # deps might depend on any of those
                recDeps = self.getTransitiveDeps(dep, variantSet, classMaps, force=force)
                for recdep in recDeps:
                    recdep.isLoadDep = True  # all these become load dependencies
                newLoad.update(recDeps)
        shallowDeps['load'] = list(newLoad)
        return shallowDeps

    ##
    # Check wether load dependencies are fresh which are included following
    # a depsItem.needsRecursion of the current class
    def transitiveDepsAreFresh(depsStruct, cacheModTime):
        result = True
        if cacheModTime is None:  # TODO: this can currently only occur with a Cache.memcache result
            result = False
        else:
            for dep in depsStruct["load"]:
                if dep.requestor != self.id:  # this was included through a recursive traversal
                    if dep.name in ClassesAll:
                        classObj = ClassesAll[dep.name]
                        if cacheModTime < classObj.m_time():
                        #if cacheModTime < classObj.library.mostRecentlyChangedFile()[1]:
                            console.debug("Invalidating dep cache for %s, as %s is newer" % (self.id, classObj.id))
                            result = False
                            break
                            # checking classObj.m_time() was done a lot, and was a major time consumer,
                            # esp. when building demobrowser; just checking a library's youngest entry is
                            # much faster, as it is only calculated once (when called without (force=True));
                            # the downside is that a change of one class in a library will result in cache
                            # invalidation for *all* classes in this lib; that's the trade-off;
                            # i'd love to just check the libs directly ("for lib in script.libraries:
                            #   if cacheModTime < lib.mostRecentlyChangedFile()[1]:..."), but I don't
                            # have access to the script here in Class.
        return result

    # -- Main ---------------------------------------------------------

    # handles cache and invokes worker function

    console = self.context['console']

    classVariants = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variantSet)
    # 'statics' optimization shortens dep lists, so it is part of the cache key
    statics_optim = 'statics' in Context.jobconf.get("compile-options/code/optimize",[])
    cacheId = "deps-%s-%s-%s" % (self.path, util.toString(relevantVariants), int(statics_optim))
    cached = True

    # try compile cache
    classInfo, classInfoMTime = self._getClassCache()
    (deps, cacheModTime) = classInfo[cacheId] if cacheId in classInfo else (None,None)

    # try dependencies.json
    if (True  # just a switch
        and deps == None
        # TODO: temp. hack to work around issue with 'statics' optimization and dependencies.json
        and not statics_optim
       ):
        deps_json, cacheModTime = self.library.getDependencies(self.id)
        if deps_json is not None:
            #console.info("using dependencies.json for: %s" % self.id)
            deps = self.depsItems_from_Json(deps_json)
            # don't cache at all, so later 'statics' optimized jobs don't
            # pick up the short depsList from cache

    if (deps == None
        or force == True
        or not transitiveDepsAreFresh(deps, cacheModTime)):
        cached = False
        deps = buildShallowDeps(tree)
        deps = buildTransitiveDeps(deps)
        if not tree:  # don't cache for a passed-in tree
            classInfo[cacheId] = (deps, time.time())
            self._writeClassCache(classInfo)

    return deps, cached
def getTransitiveDeps(self, depsItem, variants, classMaps, checkSet=None, force=False):
    ##
    # Return the transitive closure of dependencies for a single DependencyItem,
    # following method-level references across classes.
    #
    # @param depsItem  DependencyItem (class#attribute) to start from
    # @param variants  variant bindings; folded into the per-method cache key
    # @param classMaps shared lookup accumulator for findClassForFeature
    # @param checkSet  optional pre-seeded accumulator set (currently unused by
    #                  callers, see note at the bottom)
    # @param force     skip the per-method memory cache
    # @return set of DependencyItem

    ##
    # find dependencies of a method <methodId> that has been referenced from
    # <classId>. recurse on the immediate dependencies in the method code.
    #
    # @param deps accumulator variable set((c1,m1), (c2,m2),...)
    def getTransitiveDepsR(dependencyItem, variantString, totalDeps):

        # We don't add the in-param to the global result
        classId  = dependencyItem.name
        methodId = dependencyItem.attribute
        function_pruned = False

        cacheId = "methoddeps-%r-%r-%r" % (classId, methodId, variantString)
        # The bad thing here is that 'variantString' contains environment setting
        # that normally have no influence on the dependencies (like
        # 'qx.Application'). So cached deps are rejected for no reason (ex.
        # building the demos of Demobrowser). But I cannot easily apply
        # variant-projection here, as it only proves that the current class is
        # independent of a specific environement key; but its recursive deps could
        # well be.
        # Fix: Get the shallow deps of the current method from cache, and then get the
        # trans. deps of those items. They then could appy the same reasoning.
        if not force:
            # Check cache
            cachedDeps, _ = cache.read(cacheId)  # no use to put this into a file, due to transitive dependencies to other files
            if cachedDeps != None:
                console.debug("using cached result")
                #print "\nusing cached result for", classId, methodId
                return cachedDeps

        # Need to calculate deps
        console.dot(1)

        # Check known class
        if classId not in ClassesAll:
            console.debug("Skipping unknown class of dependency: %s#%s (%s:%d)" % (classId, methodId,
                          dependencyItem.requestor, dependencyItem.line))
            return set()

        # Check other class
        elif classId != self.id:
            classObj = ClassesAll[classId]
            otherdeps = classObj.getTransitiveDeps(dependencyItem, variants, classMaps, totalDeps, force)
            return otherdeps

        # Check own hierarchy
        defClassId, attribNode = self.findClassForFeature(methodId, variants, classMaps)

        # lookup error
        if not defClassId or defClassId not in ClassesAll:
            console.debug("Skipping unknown definition of dependency: %s#%s (%s:%d)" % (classId, methodId,
                          dependencyItem.requestor, dependencyItem.line))
            return set()

        defDepsItem = DependencyItem(defClassId, methodId, classId)
        if dependencyItem.isCall:
            defDepsItem.isCall = True  # if the dep is an inherited method being called, pursue the parent method as call
        localDeps = set()

        # inherited feature
        if defClassId != classId:
            self.resultAdd(defDepsItem, localDeps)
            defClass = ClassesAll[defClassId]
            otherdeps = defClass.getTransitiveDeps(defDepsItem, variants, classMaps, totalDeps, force)
            localDeps.update(otherdeps)
            return localDeps

        # Process own deps
        console.debug("%s#%s dependencies:" % (classId, methodId))
        console.indent()

        if isinstance(attribNode, Node):

            if (attribNode.getChild("function", False)  # is it a function(){..} value?
                and not dependencyItem.isCall           # and the reference was no call
               ):
                function_pruned = True
                pass  # don't lift those deps
            else:
                # Get the method's immediate deps
                # TODO: is this the right API?!
                depslist = []
                if attribNode.type == 'value':
                    attribNode = attribNode.children[0]
                self._analyzeClassDepsNode(attribNode, depslist, inLoadContext=False)
                console.debug("shallow dependencies: %r" % (depslist,))

                # This depends on attribNode belonging to current class
                my_ignores = self.getHints("ignoreDeps") + self.getHints("optionalDeps")
                my_ignores = map(HintArgument, my_ignores)

                for depsItem in depslist:
                    if depsItem in totalDeps:
                        continue
                    if depsItem.name in my_ignores:
                        continue
                    if self.resultAdd(depsItem, localDeps):
                        totalDeps = totalDeps.union(localDeps)
                        # Recurse dependencies
                        # NOTE(review): the recursive call passes 'variants'
                        # (the map) where the parameter is 'variantString' —
                        # the cache key then embeds the map's repr; confirm
                        # whether this is intended
                        downstreamDeps = getTransitiveDepsR(depsItem, variants, totalDeps)
                        localDeps.update(downstreamDeps)

        # Cache update
        # --- i cannot cache currently, if the deps of a function are pruned
        # when the function is passed as a ref, rather than called (s. above
        # around 'attribNode.getChild("function",...)')
        if not function_pruned:
            cache.write(cacheId, localDeps, memory=True, writeToFile=False)

        console.outdent()
        return localDeps

    # -- getTransitiveDeps -------------------------------------------------

    cache = self.context['cache']
    console = self.context['console']
    checkset = checkSet or set()
    variantString = util.toString(variants)
    deps = getTransitiveDepsR(depsItem, variantString, checkset)  # checkset is currently not used, leaving it for now

    return deps
def dependencies(self, variantSet):
    ##
    # get deps from meta info and class code
    #
    # Notes:
    # load time = before class = require
    # run time  = after class  = use
    #
    # @param variantSet variant bindings, used for tree selection and cache key
    # @return (deps, cached) with deps = {"load": [...], "run": [...],
    #         "ignore": [...]} of DependencyItem
    #
    # NOTE(review): this older variant references 'console', 'cache',
    # 'projectClassVariantsToCurrent' and 'DefaultIgnoredNamesDynamic' as free
    # (module-level?) names, while sibling versions access them via self/context
    # — confirm they are in scope in this file revision.

    ##
    # Compute shallow deps by merging #require/#use meta hints with the deps
    # detected from the source tree.
    def buildShallowDeps():

        load   = []
        run    = []
        ignore = [DependencyItem(x,-1) for x in DefaultIgnoredNamesDynamic]

        console.debug("Analyzing tree: %s" % self.id)
        console.indent()

        # Read meta data
        meta         = self.getHints()
        metaLoad     = meta.get("loadtimeDeps", [])
        metaRun      = meta.get("runtimeDeps" , [])
        metaOptional = meta.get("optionalDeps", [])
        metaIgnore   = meta.get("ignoreDeps" , [])

        # Process meta data
        load.extend(DependencyItem(x,-1) for x in metaLoad)
        run.extend(DependencyItem(x,-1) for x in metaRun)
        ignore.extend(DependencyItem(x,-1) for x in metaIgnore)

        # Read source tree data
        (treeLoad, treeRun) = ([], [])  # will be filled by _analyzeClassDepsNode
        self._analyzeClassDepsNode(self.tree(variantSet), treeLoad, treeRun, inFunction=False, variants=variantSet)

        # Process source tree data
        if not "auto-require" in metaIgnore:
            for dep in treeLoad:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in metaLoad:
                    console.warn("%s: #require(%s) is auto-detected" % (self.id, item))
                else:
                    # force uniqueness on the class name
                    if item not in (x.name for x in load):
                        load.append(dep)

        if not "auto-use" in metaIgnore:
            for dep in treeRun:
                item = dep.name
                if item in metaOptional:
                    pass
                elif item in (x.name for x in load):
                    # already a load dep, no need to list as run dep too
                    pass
                elif item in metaRun:
                    console.warn("%s: #use(%s) is auto-detected" % (self.id, item))
                else:
                    # force uniqueness on the class name
                    if item not in (x.name for x in run):
                        run.append(dep)

        console.outdent()

        # Build data structure
        deps = {
            "load"   : load,
            "run"    : run,
            "ignore" : ignore,
        }

        return deps

    # -- Main ---------------------------------------------------------

    # handles cache and invokes worker function

    classVariants = self.classVariants()
    relevantVariants = projectClassVariantsToCurrent(classVariants, variantSet)
    cacheId = "deps-%s-%s" % (self.path, util.toString(relevantVariants))
    cached = True

    # NOTE(review): other blocks in this file unpack readmulti as a
    # (value, mtime) tuple ("size, _ = self._cache.readmulti(...)"); here the
    # raw return is compared to None — verify the cache API of this revision
    # returns a bare value, otherwise this never detects a cache hit correctly.
    deps = cache.readmulti(cacheId, self.path)
    if deps == None:
        cached = False
        deps = buildShallowDeps()
        cache.writemulti(cacheId, deps)

    return deps, cached
def getTreeCacheId(optimize=None, variantSet=None):
    ##
    # Compute the cache key under which this class's (optionally optimized)
    # syntax tree is stored.
    #
    # @param optimize   list of optimization names; None is equivalent to the
    #                   former [] default (no optimizations)
    # @param variantSet map of variant bindings; None is equivalent to the
    #                   former {} default (no variants)
    # @return str combining file path, optimization id and the variants
    #         actually relevant for this class
    #
    # The mutable default arguments ([]/{}) were replaced with None sentinels
    # (standard Python pitfall); the resulting id is identical for callers
    # relying on the old defaults.
    if optimize is None:
        optimize = []
    if variantSet is None:
        variantSet = {}
    classVariants = self.classVariants()
    relevantVariants = self.projectClassVariantsToCurrent(classVariants, variantSet)
    return "tree-%s-%s-%s" % (self.path, self._optimizeId(optimize), util.toString(relevantVariants))
def getMethodDepsR(classId, methodId, variants, deps):
    ##
    # Recursively collect (classId, methodId) pairs that <classId>#<methodId>
    # depends on, using a per-method memory cache.
    #
    # @param classId  name of the class the method was referenced on
    # @param methodId name of the referenced method/attribute
    # @param variants variant bindings, folded into the cache key
    # @param deps     accumulator set of (classId, methodId) already on the
    #                 current recursion path (cyclic-reference guard)
    # @return set of (classId, methodId) dependencies
    self._console.debug("%s#%s dependencies:" % (classId, methodId))
    self._console.indent()

    # Check cache
    filePath= self._classes[classId]["path"]
    cacheId = "methoddeps-%r-%r-%r" % (classId, methodId, util.toString(variants))
    ndeps = self._cache.read(cacheId, memory=True)  # no use to put this into a file, due to transitive dependencies to other files
    if ndeps != None:
        self._console.debug("using cached result")
        #deps.update(ndeps)
        self._console.outdent()
        return ndeps

    # Calculate deps

    # find the defining class
    clazzId, attribValNode = findClassForMethod(classId, methodId, variants)

    # Get the method's immediate deps
    deps_rt = []
    getReferencesFromSource(clazzId, attribValNode, deps_rt)
    ndeps= set(())

    # put into right format
    for dep in deps_rt:
        assId = reduceAssembled(dep)
        if assId == u'':  # unknown class
            self._console.info("Skipping unknown id: %r" % dep)
            continue
        clazzId, methId = splitClassAttribute(assId, dep)
        ndeps.add((clazzId,methId))

    self._console.debug("Code references: %r" % list(ndeps))

    # Recurse on the immediate deps
    ndepslist = list(ndeps)
    ndeps = set(())  # will be re-populated with brushed values (e.g. 'this' gone)
    for clazzId, methId in ndepslist:
        if clazzId == "this":
            clazzId = classId  # 'this' refers to the class the method was seen on
        ndeps.add((clazzId, methId))
        nclazzId, methValNode = findClassForMethod(clazzId, methId, variants)  # find the original class methId was defined in
        if not nclazzId:
            self._console.warn("Skipping unknown class dependency: %s#%s" % (clazzId, methId))
        elif nclazzId == True:  # this must be a known global (like Error, Regexp, ...)
            self._console.debug("Dependency automatically fullfilled: %s#%s" % (clazzId, methId))
            continue
        else:
            clazzId = nclazzId  # pursue the defining class, not the referencing one

        # cyclic check
        if (clazzId, methId) in deps:
        #if (clazzId, methId) == (classId, methodId):
            self._console.debug("Class.method already seen, skipping: %s#%s" % (clazzId, methId))
            continue
        else:
            ndeps.add((clazzId, methId))
            assert clazzId in self._classes
            # recursive call; the accumulator union keeps the cyclic check
            # effective down the recursion path
            r = getMethodDepsR(clazzId, methId, variants, deps.union(ndeps))
            ndeps.update(r)

    # Cache update
    self._cache.write(cacheId, ndeps, memory=True, writeToFile=False)
    # accumulator update
    #deps.update(ndeps)

    self._console.debug("Recursive dependencies: %r" % list(ndeps))
    self._console.outdent()

    return ndeps
def getTreeCacheId(optimize=[], variantSet={}):
    ##
    # Build the cache id for this class's syntax tree from the file path,
    # the requested optimizations and the variant bindings that actually
    # matter for this class.
    allVariants = self.classVariants()
    projected = self.projectClassVariantsToCurrent(allVariants, variantSet)
    optimizePart = self._optimizeId(optimize)
    variantPart = util.toString(projected)
    return "tree-%s-%s-%s" % (self.path, optimizePart, variantPart)