def storeApi(self, include, apiPath, variantSet, verify):
    """Generate the API documentation tree for *include* and write it to *apiPath*.

    Loads per-class API docs (via self.getApi), merges them into a single doc
    tree together with package docs, builds a search index, optionally verifies
    links, then writes apidata.json, one <fullName>.json per class, and a
    compact apiindex.json.

    :param include: iterable of class ids to document
    :param apiPath: output directory for the generated .json files
    :param variantSet: variant configuration forwarded to self.getApi
    :param verify: optional collection of verification switches ("links", ...)
    :return: None (returns early if any class doc failed to load)
    """
    self._console.info("Generating API data...")
    self._console.indent()
    docTree = tree.Node("doctree")
    length = len(include)

    self._console.info("Loading class docs...", False)
    self._console.indent()

    packages = []
    hasErrors = False
    for pos, fileId in enumerate(include):
        self._console.progress(pos + 1, length)
        fileApi = self.getApi(fileId, variantSet)
        if fileApi is None:  # fixed: identity check, not == None
            hasErrors = True

        # Only continue merging if there were no errors
        if not hasErrors:
            self._mergeApiNodes(docTree, fileApi)
            pkgId = self._classesObj[fileId].package

            # make sure all parent packages are included
            nsparts = pkgId.split('.')
            for i in range(len(nsparts) + 1):
                parentPkg = ".".join(nsparts[0:i])
                if parentPkg not in packages:
                    packages.append(parentPkg)

    self._console.outdent()

    if hasErrors:
        self._console.error("Found erroneous API information. Please see above. Stopping!")
        return

    self._console.info("Loading package docs...")
    self._console.indent()
    packages.sort()
    for pkgId in packages:
        self._mergeApiNodes(docTree, self.getPackageApi(pkgId))
    self._console.outdent()

    self._console.info("Connecting classes...")
    api.connectPackage(docTree, docTree)

    self._console.info("Generating search index...")
    index = self.docTreeToSearchIndex(docTree, "", "", "")

    if verify and "links" in verify:
        self.verifyLinks(docTree, index)

    self._console.info("Saving data...", False)
    self._console.indent()
    packageData = api.getPackageData(docTree)
    packageJson = json.dumps(packageData)
    filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)

    # count classes once so the progress meter has a total
    length = sum(1 for _ in api.classNodeIterator(docTree))

    pos = 0
    for classData in api.classNodeIterator(docTree):
        pos += 1
        self._console.progress(pos, length)
        nodeData = tree.getNodeData(classData)
        nodeJson = json.dumps(nodeData)
        fileName = os.path.join(apiPath, classData.get("fullName") + ".json")
        filetool.save(fileName, nodeJson)
    self._console.outdent()

    self._console.info("Saving index...")
    indexContent = json.dumps(index, separators=(',', ':'), sort_keys=True)  # compact encoding
    filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)

    self._console.outdent()
    self._console.info("Done")
def storeApi(self, include, apiPath):
    """Generate the API documentation tree for *include* and write it to *apiPath*.

    Loads per-class API docs (via self.getApi), merges them with package docs
    into one doc tree, builds a search index, then writes apidata.json, one
    <fullName>.json per class, and apiindex.json.

    :param include: iterable of class ids to document
    :param apiPath: output directory for the generated .json files
    :return: None (returns early if any class doc failed to load)
    """
    self._console.info("Generating API data...")
    self._console.indent()
    docTree = tree.Node("doctree")
    length = len(include)

    self._console.info("Loading class docs...", False)
    self._console.indent()

    packages = []
    hasErrors = False
    for pos, fileId in enumerate(include):
        self._console.progress(pos + 1, length)
        fileApi = self.getApi(fileId)
        if fileApi is None:  # fixed: identity check, not == None
            hasErrors = True

        # Only continue merging if there were no errors
        if not hasErrors:
            self._mergeApiNodes(docTree, fileApi)
            pkgId = self._classesObj[fileId].package

            # make sure all parent packages are included
            nsparts = pkgId.split('.')
            for i in range(len(nsparts) + 1):
                parentPkg = ".".join(nsparts[0:i])
                if parentPkg not in packages:
                    packages.append(parentPkg)

    self._console.outdent()

    if hasErrors:
        self._console.error(
            "Found erroneous API information. Please see above. Stopping!")
        return

    self._console.info("Loading package docs...")
    self._console.indent()
    packages.sort()
    for pkgId in packages:
        self._mergeApiNodes(docTree, self.getPackageApi(pkgId))
    self._console.outdent()

    self._console.info("Connecting classes...")
    api.connectPackage(docTree, docTree)

    self._console.info("Generating search index...")
    indexContent = self.docTreeToSearchIndex(docTree, "", "", "")

    self._console.info("Saving data...", False)
    self._console.indent()
    packageData = api.getPackageData(docTree)
    packageJson = json.dumps(packageData)
    filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)

    # count classes once so the progress meter has a total
    length = sum(1 for _ in api.classNodeIterator(docTree))

    pos = 0
    for classData in api.classNodeIterator(docTree):
        pos += 1
        self._console.progress(pos, length)
        nodeData = tree.getNodeData(classData)
        nodeJson = json.dumps(nodeData)
        fileName = os.path.join(apiPath, classData.get("fullName") + ".json")
        filetool.save(fileName, nodeJson)
    self._console.outdent()

    self._console.info("Saving index...")
    filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)

    self._console.outdent()
    self._console.info("Done")
def storeApi(self, include, apiPath, variantSet, verify):
    """Generate the API documentation tree for *include* and write it to *apiPath*.

    Loads per-class API docs together with their @attach maps (via self.getApi),
    merges everything into one doc tree, applies @attach info to the matching
    classes, builds a search index, optionally verifies links, then writes
    apidata.json, one <fullName>.json per class, and a compact apiindex.json.

    :param include: iterable of class ids to document
    :param apiPath: output directory for the generated .json files
    :param variantSet: variant configuration forwarded to self.getApi
    :param verify: optional collection of verification switches ("links", ...)
    :return: None (returns early if any class doc failed to load)
    """
    self._console.info("Generating API data...")
    self._console.indent()
    docTree = tree.Node("doctree")
    docTree.set("fullName", "")
    docTree.set("name", "")
    docTree.set("packageName", "")
    length = len(include)

    self._console.info("Loading class docs...", False)
    self._console.indent()

    packages = []
    AttachMap = {}
    hasErrors = False
    for pos, fileId in enumerate(include):
        self._console.progress(pos + 1, length)
        fileApi, attachMap = self.getApi(fileId, variantSet)
        if fileApi is None:  # fixed: identity check, not == None
            hasErrors = True

        # Only continue merging if there were no errors
        if not hasErrors:
            # update AttachMap: deep-merge this class's @attach entries,
            # warning on duplicate targets instead of silently overwriting
            for cls in attachMap:  # 'qx.Class', 'qx.core.Object', 'q', ...
                if cls not in AttachMap:
                    AttachMap[cls] = attachMap[cls]
                else:
                    for section in attachMap[cls]:  # 'statics', 'members'
                        if section not in AttachMap[cls]:
                            AttachMap[cls][section] = attachMap[cls][section]
                        else:
                            for method in attachMap[cls][section]:  # 'define', 'showToolTip', ...
                                if method not in AttachMap[cls][section]:
                                    AttachMap[cls][section][method] = attachMap[cls][section][method]
                                else:
                                    self._console.warn("Multiple @attach for same target '%s::%s#%s'." % (cls, section, method))

            self._mergeApiNodes(docTree, fileApi)
            pkgId = self._classesObj[fileId].package

            # make sure all parent packages are included
            nsparts = pkgId.split('.')
            for i in range(len(nsparts) + 1):
                parentPkg = ".".join(nsparts[0:i])
                if parentPkg not in packages:
                    packages.append(parentPkg)

    self._console.outdent()

    if hasErrors:
        self._console.error("Found erroneous API information. Please see above. Stopping!")
        return

    self._console.info("Loading package docs...")
    self._console.indent()
    packages.sort()
    for pkgId in packages:
        self._mergeApiNodes(docTree, self.getPackageApi(pkgId))
    self._console.outdent()

    self._console.info("Connecting classes...")
    api.connectPackage(docTree, docTree)

    self._console.info("Generating search index...")
    index = self.docTreeToSearchIndex(docTree, "", "", "")

    if verify and "links" in verify:
        self.verifyLinks(docTree, index)

    self._console.info("Saving data...", False)
    self._console.indent()
    packageData = api.getPackageData(docTree)
    packageJson = json.dumps(packageData)
    filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)

    # apply the @attach information
    for classData in api.classNodeIterator(docTree):
        className = classData.get("fullName")
        if className in AttachMap:
            self._applyAttachInfo(className, classData, AttachMap[className])

    # write per-class .json to disk; count classes once for the progress meter
    length = sum(1 for _ in api.classNodeIterator(docTree))

    pos = 0
    for classData in api.classNodeIterator(docTree):
        pos += 1
        self._console.progress(pos, length)
        nodeData = tree.getNodeData(classData)
        nodeJson = json.dumps(nodeData)
        fileName = os.path.join(apiPath, classData.get("fullName") + ".json")
        filetool.save(fileName, nodeJson)
    self._console.outdent()

    # write apiindex.json
    self._console.info("Saving index...")
    indexContent = json.dumps(index, separators=(',', ':'), sort_keys=True)  # compact encoding
    filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)

    self._console.outdent()
    self._console.info("Done")
def storeApi(self, include, apiPath, variantSet, jobConf):
    """Generate the API documentation tree for *include* and write it to *apiPath*.

    Loads per-class API docs together with their @attach maps (via self.getApi),
    merges everything into one doc tree, applies @attach info, builds a search
    index, runs the verifications requested in *jobConf* ("links", "types",
    "statistics"), then writes apidata.json, one <fullName>.json per class, a
    compact apiindex.json, and (if configured) an XML sitemap.

    :param include: iterable of class ids to document
    :param apiPath: output directory for the generated files
    :param variantSet: variant configuration forwarded to self.getApi
    :param jobConf: job configuration mapping ("verify", "warnings", "sitemap", ...)
    :return: None (returns early if any class doc failed to load)
    """
    self._console.info("Generating API data...")
    self._console.indent()
    docTree = tree.Node("doctree")
    docTree.set("fullName", "")
    docTree.set("name", "")
    docTree.set("packageName", "")
    length = len(include)

    self._console.info("Loading class docs...", False)
    self._console.indent()

    packages = []
    AttachMap = {}
    hasErrors = False
    for pos, fileId in enumerate(include):
        self._console.progress(pos + 1, length)
        fileApi, attachMap = self.getApi(fileId, variantSet)
        if fileApi is None:  # fixed: identity check, not == None
            hasErrors = True

        # Only continue merging if there were no errors
        if not hasErrors:
            # update AttachMap: deep-merge this class's @attach entries,
            # warning on duplicate targets instead of silently overwriting
            for cls in attachMap:  # 'qx.Class', 'qx.core.Object', 'q', ...
                if cls not in AttachMap:
                    AttachMap[cls] = attachMap[cls]
                else:
                    for section in attachMap[cls]:  # 'statics', 'members'
                        if section not in AttachMap[cls]:
                            AttachMap[cls][section] = attachMap[cls][section]
                        else:
                            for method in attachMap[cls][section]:  # 'define', 'showToolTip', ...
                                if method not in AttachMap[cls][section]:
                                    AttachMap[cls][section][method] = attachMap[cls][section][method]
                                else:
                                    self._console.warn("Multiple @attach for same target '%s::%s#%s'." % (cls, section, method))

            self._mergeApiNodes(docTree, fileApi)
            pkgId = self._classesObj[fileId].package

            # make sure all parent packages are included
            nsparts = pkgId.split('.')
            for i in range(len(nsparts) + 1):
                parentPkg = ".".join(nsparts[0:i])
                if parentPkg not in packages:
                    packages.append(parentPkg)

    self._console.outdent()

    if hasErrors:
        self._console.error("Found erroneous API information. Please see above. Stopping!")
        return

    self._console.info("Loading package docs...")
    self._console.indent()
    packages.sort()
    for pkgId in packages:
        self._mergeApiNodes(docTree, self.getPackageApi(pkgId))
    self._console.outdent()

    self._console.info("Connecting classes...", feed=False)
    api.connectPackage(docTree, docTree)
    self._console.dotclear()

    self._console.info("Generating search index...")
    index = self.docTreeToSearchIndex(docTree, "", "", "")

    if "verify" in jobConf:
        if "links" in jobConf["verify"]:
            api.verifyLinks(docTree, index)
        if "types" in jobConf["verify"]:
            api.verifyTypes(docTree, index)
    if "warnings" in jobConf and "output" in jobConf["warnings"]:
        api.logErrors(docTree, jobConf["warnings"]["output"])
    if "verify" in jobConf:
        if "statistics" in jobConf["verify"]:
            api.verifyDocPercentage(docTree)

    self._console.info("Saving data...", False)
    self._console.indent()
    packageData = api.getPackageData(docTree)
    packageJson = json.dumps(packageData)
    filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)

    # apply the @attach information
    for classData in api.classNodeIterator(docTree):
        className = classData.get("fullName")
        if className in AttachMap:
            self._applyAttachInfo(className, classData, AttachMap[className])

    # write per-class .json to disk; count classes once for the progress meter
    length = sum(1 for _ in api.classNodeIterator(docTree))

    # hoisted loop-invariant sitemap config lookup out of the per-class loop
    sitemap = False
    if "sitemap" in jobConf:
        sitemap = jobConf["sitemap"]
    linkUri = None
    if sitemap and "link-uri" in sitemap:
        linkUri = sitemap["link-uri"]

    links = []
    pos = 0
    for classData in api.classNodeIterator(docTree):
        pos += 1
        self._console.progress(pos, length)
        nodeData = tree.getNodeData(classData)
        nodeJson = json.dumps(nodeData)
        className = classData.get("fullName")
        fileName = os.path.join(apiPath, className + ".json")
        filetool.save(fileName, nodeJson)
        if linkUri is not None:
            links.append(linkUri % className)
    self._console.outdent()

    # write apiindex.json
    self._console.info("Saving index...")
    # BUG FIX: separators was (', ', ':') -- the stray space contradicted the
    # "compact encoding" intent (and the other storeApi variants) and bloated
    # apiindex.json; use the true compact form.
    indexContent = json.dumps(index, separators=(',', ':'), sort_keys=True)  # compact encoding
    filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)

    # save sitemap
    if sitemap and len(links) > 0:
        self._console.info("Saving XML sitemap...")
        sitemapData = self.getSitemap(links)
        if "file" in sitemap:
            sitemapFile = sitemap["file"]
        else:
            sitemapFile = os.path.join(apiPath, "sitemap.xml")
        filetool.save(sitemapFile, sitemapData)

    self._console.outdent()
    self._console.info("Done")