def module(r, package_name, module_name):
    """Render the API-browser page for one module of *package_name*.

    :param r: Django HttpRequest
    :param package_name: SDK package directory name under SDKPACKAGESDIR
    :param module_name: module whose ``<module_name>.md`` doc is parsed
    :returns: rendered ``module_doc.html`` response
    """
    page = 'apibrowser'
    doc_file = '.'.join((module_name, 'md'))
    # FIX: close the doc file instead of leaking the handle
    doc = open(os.path.join(
        settings.SDKPACKAGESDIR, package_name, 'docs', doc_file))
    try:
        text = doc.read()
    finally:
        doc.close()
    # changing the tuples to dictionaries
    hunks = list(apiparser.parse_hunks(text))
    data = []
    for h in hunks:
        # convert JSON to a nice list
        if h[0] == 'api-json':
            entity = h[1]
            entity['template'] = '_entity_%s.html' % entity['type']
            data.append(entity)
    module = {
        'name': module_name,
        'info': hunks[0][1],
        'data': data,
        'hunks': hunks
    }
    package = {'name': package_name,
               'modules': _get_module_names(package_name)}
    # NOTE: the template intentionally receives *all* locals
    # (page, text, hunks, ...) — kept for backward compatibility
    return render_to_response(
        'module_doc.html', locals(),
        context_instance=RequestContext(r))
def test_basic(self):
    """ exporting hunks """
    docs_dir = os.path.join(
        settings.VIRTUAL_ENV,
        'src/jetpack-sdk/packages/jetpack-core/docs')
    text = open(os.path.join(docs_dir, 'url.md')).read()
    # FIX: failUnless is a deprecated alias; assertTrue is the
    # supported spelling with identical semantics
    self.assertTrue(len(list(apiparser.parse_hunks(text))) > 0)
def package(r, package_name='jetpack-core'):
    """ containing a listing of all modules docs """
    page = 'apibrowser'
    DOC_FILES = _get_module_filenames(package_name)
    package = {'name': package_name, 'modules': []}
    for d in DOC_FILES:
        # FIX: read the doc and close the handle instead of leaking it
        doc = open(os.path.join(
            settings.SDKPACKAGESDIR, package_name, 'docs', d))
        try:
            text = doc.read()
        finally:
            doc.close()
        (doc_name, extension) = os.path.splitext(d)
        # changing the tuples to dictionaries
        hunks = list(apiparser.parse_hunks(text))
        data = {}
        for h in hunks:
            data[h[0]] = h[1]
        package['modules'].append({
            'name': doc_name,
            'info': hunks[0][1],
            'data': data,
            'hunks': hunks
        })
    # template receives all locals (page, package, ...) — kept as-is
    return render_to_response(
        'package_doc.html', locals(),
        context_instance=RequestContext(r))
def module(r, package_name, module_name):
    """Render the API-browser page for one module of *package_name*.

    :param r: Django HttpRequest
    :param package_name: SDK package directory name under SDKPACKAGESDIR
    :param module_name: module whose ``<module_name>.md`` doc is parsed
    :returns: rendered ``module_doc.html`` response
    """
    page = 'apibrowser'
    sdk_version = SDKVERSION
    doc_file = '.'.join((module_name, 'md'))
    # FIX: close the doc file instead of leaking the handle
    doc = open(
        os.path.join(SDKPACKAGESDIR, package_name, 'docs', doc_file))
    try:
        text = doc.read()
    finally:
        doc.close()
    # changing the tuples to dictionaries
    hunks = list(apiparser.parse_hunks(text))
    entities = []
    for h in hunks:
        # convert JSON to a nice list
        if h[0] == 'api-json':
            h[1]['json_type'] = "api"
            entities.append(h[1])
    module = {
        'name': module_name,
        'info': hunks[0][1],
        'entities': entities,
        'hunks': hunks
    }
    package = {'name': package_name,
               'modules': _get_module_names(package_name)}
    return render_to_response(
        'module_doc.html',
        {'page': page,
         'sdk_version': sdk_version,
         'package': package,
         'package_name': package_name,
         'module': module},
        context_instance=RequestContext(r))
def _get_hunks(text):
    """Parse *text* into API doc hunks, falling back to an error-notice
    hunk when the parser fails on the document.

    :param text: raw Markdown source of a module doc
    :returns: list of ``(type, payload)``-shaped hunks
    """
    # changing the tuples to dictionaries
    try:
        hunks = list(apiparser.parse_hunks(text))
    except Exception as err:
        log.error(str(err))
        # best-effort: keep the page rendering with a human-readable
        # notice instead of propagating a 500
        hunks = [[None, '<p>Sorry. Error in reading the doc. '
                  'Please check <a href="https://jetpack.mozillalabs.com/'
                  'sdk/1.0b1/docs/#package/addon-kit">official docs</a></p>']]
    # BUG FIX: the computed hunks were never returned (function
    # implicitly returned None)
    return hunks
def test_basic(self):
    """ exporting hunks """
    from api.views import SDKPACKAGESDIR
    # XXX: the path is different now
    docs_dir = os.path.join(SDKPACKAGESDIR, "jetpack-core/docs")
    text = open(os.path.join(docs_dir, "url.md")).read()
    # FIX: failUnless is a deprecated alias; assertTrue is the
    # supported spelling with identical semantics
    self.assertTrue(len(list(apiparser.parse_hunks(text))) > 0)
def _respond_with_apidoc_json(self, path):
    """Serve the API doc at *path*, parsed and JSON-encoded, as
    text/plain.

    Responds 200 with the JSONified hunk list on success, or 500 with
    the parser's error message when the Markdown cannot be parsed.
    """
    # FIX: close the doc file instead of leaking the handle
    f = open(path, 'r')
    try:
        docs_md = f.read()
    finally:
        f.close()
    try:
        parsed = list(apiparser.parse_hunks(docs_md))
        self.start_response('200 OK',
                            [('Content-type', "text/plain")])
        return [json.dumps(parsed)]
    except apiparser.ParseError as e:
        self.start_response('500 Parse Error',
                            [('Content-type', "text/plain")])
        return [str(e)]
def package(r, package_name=None):
    """ containing a listing of all modules docs """
    if not package_name:
        package_name = DEFAULTLIB_NAME
    page = 'apibrowser'
    sdk_version = SDKVERSION
    DOC_FILES = _get_module_filenames(package_name)
    package = {
        'name': _get_package_fullname(package_name),
        'modules': []}
    for d in DOC_FILES:
        path = os.path.join(SDKPACKAGESDIR, package_name, 'docs', d)
        # skip sub-directories; only regular doc files are parsed
        if not os.path.isdir(path):
            # FIX: reuse the already-computed path and close the
            # handle instead of leaking it
            doc = open(path)
            try:
                text = doc.read()
            finally:
                doc.close()
            (doc_name, extension) = os.path.splitext(d)
            # changing the tuples to dictionaries
            hunks = list(apiparser.parse_hunks(text))
            data = {}
            for h in hunks:
                data[h[0]] = h[1]
            package['modules'].append({
                'name': doc_name,
                'info': hunks[0][1],
                'data': data,
                'hunks': hunks
            })
    return render_to_response(
        'package_doc.html',
        {'page': page,
         'sdk_version': sdk_version,
         'package': package,
         'package_name': package_name,
         'corelib': (package_name == CORELIB_NAME),
         'addon_kit': ADDON_KIT},
        context_instance=RequestContext(r))
def generate_static_docs(env_root, tgz_filename, base_url=''):
    """Build a static HTML/JSON snapshot of the SDK docs and pack it
    into *tgz_filename* (gzipped tar).

    :param env_root: root of the SDK checkout
    :param tgz_filename: output ``.tar.gz`` path
    :param base_url: base URL baked into the generated pages
    """
    web_docs = webdocs.WebDocs(env_root, base_url)
    server = Server(env_root, web_docs,
                    task_queue=None,
                    expose_privileged_api=False)

    staging_dir = os.path.join(env_root, "addon-sdk-docs")
    if os.path.exists(staging_dir):
        shutil.rmtree(staging_dir)

    # first, copy static-files
    shutil.copytree(server.root, staging_dir)
    # py2.5 doesn't have ignore=, so we delete tempfiles afterwards. If we
    # required >=py2.6, we could use ignore=shutil.ignore_patterns("*~")
    for (dirpath, dirnames, filenames) in os.walk(staging_dir):
        for n in filenames:
            if n.endswith("~"):
                os.unlink(os.path.join(dirpath, n))

    # then copy docs from each package
    os.mkdir(os.path.join(staging_dir, "packages"))

    pkg_cfg = packaging.build_pkg_cfg(server.env_root)

    # starting with the (generated) index file
    index = json.dumps(packaging.build_pkg_index(pkg_cfg))
    index_path = os.path.join(staging_dir, "packages", 'index.json')
    open(index_path, 'w').write(index)

    # and every doc-like thing in the package
    for pkg_name, pkg in pkg_cfg['packages'].items():
        src_dir = pkg.root_dir
        dest_dir = os.path.join(staging_dir, "packages", pkg_name)
        if not os.path.exists(dest_dir):
            os.mkdir(dest_dir)

        # TODO: This is a DRY violation from main.js. We should
        # really move the common logic/names to cuddlefish.packaging.
        src_readme = os.path.join(src_dir, "README.md")
        if os.path.exists(src_readme):
            shutil.copyfile(src_readme,
                            os.path.join(dest_dir, "README.md"))

        # create the package page
        package_doc_html = web_docs.create_package_page(src_dir)
        open(os.path.join(dest_dir, pkg_name + ".html"), "w")\
            .write(package_doc_html)

        docs_src_dir = os.path.join(src_dir, "docs")
        docs_dest_dir = os.path.join(dest_dir, "docs")
        if not os.path.exists(docs_dest_dir):
            os.mkdir(docs_dest_dir)
        for (dirpath, dirnames, filenames) in os.walk(docs_src_dir):
            assert dirpath.startswith(docs_src_dir)
            relpath = dirpath[len(docs_src_dir)+1:]
            for dirname in dirnames:
                dest_path = os.path.join(docs_dest_dir, relpath, dirname)
                if not os.path.exists(dest_path):
                    os.mkdir(dest_path)
            for filename in filenames:
                if filename.endswith("~"):
                    continue
                src_path = os.path.join(dirpath, filename)
                dest_path = os.path.join(docs_dest_dir, relpath, filename)
                shutil.copyfile(src_path, dest_path)
                if filename.endswith(".md"):
                    # parse and JSONify the API docs
                    # FIX: close the source file instead of leaking it
                    md_file = open(src_path, 'r')
                    try:
                        docs_md = md_file.read()
                    finally:
                        md_file.close()
                    docs_parsed = list(apiparser.parse_hunks(docs_md))
                    docs_json = json.dumps(docs_parsed)
                    open(dest_path + ".json", "w").write(docs_json)
                    # write the HTML div files
                    docs_div = apirenderer.json_to_div(docs_parsed, src_path)
                    open(dest_path + ".div", "w").write(docs_div)
                    # write the standalone HTML files
                    docs_html = web_docs.create_module_page(src_path)
                    open(dest_path[:-3] + ".html", "w").write(docs_html)

    dev_guide_src = os.path.join(server.root, 'md', 'dev-guide')
    dev_guide_dest = os.path.join(staging_dir, 'dev-guide')
    if not os.path.exists(dev_guide_dest):
        os.mkdir(dev_guide_dest)
    for (dirpath, dirnames, filenames) in os.walk(dev_guide_src):
        assert dirpath.startswith(dev_guide_src)
        relpath = dirpath[len(dev_guide_src)+1:]
        for dirname in dirnames:
            dest_path = os.path.join(dev_guide_dest, relpath, dirname)
            if not os.path.exists(dest_path):
                os.mkdir(dest_path)
        for filename in filenames:
            if filename.endswith("~"):
                continue
            src_path = os.path.join(dirpath, filename)
            dest_path = os.path.join(dev_guide_dest, relpath, filename)
            if filename.endswith(".md"):
                # write the standalone HTML files
                docs_html = web_docs.create_guide_page(src_path)
                open(dest_path[:-3] + ".html", "w").write(docs_html)

    # make /md/dev-guide/welcome.html the top level index file
    shutil.copy(os.path.join(dev_guide_dest, 'welcome.html'),
                os.path.join(staging_dir, 'index.html'))

    # finally, build a tarfile out of everything
    tgz = tarfile.open(tgz_filename, 'w:gz')
    try:
        # BUG FIX: add the staging dir by its real path; the original
        # tgz.add('addon-sdk-docs', ...) only worked when the current
        # working directory happened to be env_root
        tgz.add(staging_dir, 'addon-sdk-docs')
    finally:
        tgz.close()
    shutil.rmtree(staging_dir)
def generate_static_docs(env_root, tgz_filename, base_url=''):
    """Generate a standalone copy of the SDK documentation under a
    staging directory and archive it as *tgz_filename* (``.tar.gz``).

    :param env_root: root of the SDK checkout
    :param tgz_filename: output ``.tar.gz`` path
    :param base_url: base URL baked into the generated pages
    """
    web_docs = webdocs.WebDocs(env_root, base_url)
    server = Server(env_root, web_docs,
                    task_queue=None,
                    expose_privileged_api=False)

    staging_dir = os.path.join(env_root, "addon-sdk-docs")
    if os.path.exists(staging_dir):
        shutil.rmtree(staging_dir)

    # first, copy static-files
    shutil.copytree(server.root, staging_dir)
    # py2.5 doesn't have ignore=, so we delete tempfiles afterwards. If we
    # required >=py2.6, we could use ignore=shutil.ignore_patterns("*~")
    for (dirpath, dirnames, filenames) in os.walk(staging_dir):
        for n in filenames:
            if n.endswith("~"):
                os.unlink(os.path.join(dirpath, n))

    # then copy docs from each package
    os.mkdir(os.path.join(staging_dir, "packages"))

    pkg_cfg = packaging.build_pkg_cfg(server.env_root)

    # starting with the (generated) index file
    index = json.dumps(packaging.build_pkg_index(pkg_cfg))
    index_path = os.path.join(staging_dir, "packages", 'index.json')
    open(index_path, 'w').write(index)

    # and every doc-like thing in the package
    for pkg_name, pkg in pkg_cfg['packages'].items():
        src_dir = pkg.root_dir
        dest_dir = os.path.join(staging_dir, "packages", pkg_name)
        if not os.path.exists(dest_dir):
            os.mkdir(dest_dir)

        # TODO: This is a DRY violation from main.js. We should
        # really move the common logic/names to cuddlefish.packaging.
        src_readme = os.path.join(src_dir, "README.md")
        if os.path.exists(src_readme):
            shutil.copyfile(src_readme,
                            os.path.join(dest_dir, "README.md"))

        # create the package page
        package_doc_html = web_docs.create_package_page(src_dir)
        open(os.path.join(dest_dir, pkg_name + ".html"), "w")\
            .write(package_doc_html)

        docs_src_dir = os.path.join(src_dir, "docs")
        docs_dest_dir = os.path.join(dest_dir, "docs")
        if not os.path.exists(docs_dest_dir):
            os.mkdir(docs_dest_dir)
        for (dirpath, dirnames, filenames) in os.walk(docs_src_dir):
            assert dirpath.startswith(docs_src_dir)
            relpath = dirpath[len(docs_src_dir) + 1:]
            for dirname in dirnames:
                dest_path = os.path.join(docs_dest_dir, relpath, dirname)
                if not os.path.exists(dest_path):
                    os.mkdir(dest_path)
            for filename in filenames:
                if filename.endswith("~"):
                    continue
                src_path = os.path.join(dirpath, filename)
                dest_path = os.path.join(docs_dest_dir, relpath, filename)
                shutil.copyfile(src_path, dest_path)
                if filename.endswith(".md"):
                    # parse and JSONify the API docs
                    # FIX: close the source file instead of leaking it
                    md_file = open(src_path, 'r')
                    try:
                        docs_md = md_file.read()
                    finally:
                        md_file.close()
                    docs_parsed = list(apiparser.parse_hunks(docs_md))
                    docs_json = json.dumps(docs_parsed)
                    open(dest_path + ".json", "w").write(docs_json)
                    # write the HTML div files
                    docs_div = apirenderer.json_to_div(docs_parsed, src_path)
                    open(dest_path + ".div", "w").write(docs_div)
                    # write the standalone HTML files
                    docs_html = web_docs.create_module_page(src_path)
                    open(dest_path[:-3] + ".html", "w").write(docs_html)

    dev_guide_src = os.path.join(server.root, 'md', 'dev-guide')
    dev_guide_dest = os.path.join(staging_dir, 'dev-guide')
    if not os.path.exists(dev_guide_dest):
        os.mkdir(dev_guide_dest)
    for (dirpath, dirnames, filenames) in os.walk(dev_guide_src):
        assert dirpath.startswith(dev_guide_src)
        relpath = dirpath[len(dev_guide_src) + 1:]
        for dirname in dirnames:
            dest_path = os.path.join(dev_guide_dest, relpath, dirname)
            if not os.path.exists(dest_path):
                os.mkdir(dest_path)
        for filename in filenames:
            if filename.endswith("~"):
                continue
            src_path = os.path.join(dirpath, filename)
            dest_path = os.path.join(dev_guide_dest, relpath, filename)
            if filename.endswith(".md"):
                # write the standalone HTML files
                docs_html = web_docs.create_guide_page(src_path)
                open(dest_path[:-3] + ".html", "w").write(docs_html)

    # make /md/dev-guide/welcome.html the top level index file
    shutil.copy(os.path.join(dev_guide_dest, 'welcome.html'),
                os.path.join(staging_dir, 'index.html'))

    # finally, build a tarfile out of everything
    tgz = tarfile.open(tgz_filename, 'w:gz')
    try:
        # BUG FIX: add the staging dir by its real path; the original
        # tgz.add('addon-sdk-docs', ...) only worked when the current
        # working directory happened to be env_root
        tgz.add(staging_dir, 'addon-sdk-docs')
    finally:
        tgz.close()
    shutil.rmtree(staging_dir)
def parse_text(self, text):
    """Parse *text* and materialise the resulting hunk stream as a
    list."""
    hunk_stream = parse_hunks(text)
    return [hunk for hunk in hunk_stream]
def generate_static_docs(env_root, tgz_filename):
    """Stage a static copy of the SDK docs (with parsed ``.md`` API
    docs JSONified alongside) and archive it as *tgz_filename*.

    :param env_root: root of the SDK checkout
    :param tgz_filename: output ``.tar.gz`` path
    """
    server = Server(env_root=env_root,
                    task_queue=None,
                    expose_privileged_api=False)

    staging_dir = os.path.join(env_root, "jetpack-sdk-docs")
    if os.path.exists(staging_dir):
        shutil.rmtree(staging_dir)

    # first, copy static-files
    shutil.copytree(server.root, staging_dir)
    # py2.5 doesn't have ignore=, so we delete tempfiles afterwards. If we
    # required >=py2.6, we could use ignore=shutil.ignore_patterns("*~")
    for (dirpath, dirnames, filenames) in os.walk(staging_dir):
        for n in filenames:
            if n.endswith("~"):
                os.unlink(os.path.join(dirpath, n))

    # then copy docs from each package
    os.mkdir(os.path.join(staging_dir, "packages"))

    pkg_cfg = server.build_pkg_cfg()

    # starting with the (generated) index file
    index = json.dumps(server.build_pkg_index(pkg_cfg))
    index_path = os.path.join(staging_dir, "packages", 'index.json')
    open(index_path, 'w').write(index)

    # and every doc-like thing in the package
    for pkg_name, pkg in pkg_cfg['packages'].items():
        src_dir = pkg.root_dir
        dest_dir = os.path.join(staging_dir, "packages", pkg_name)
        if not os.path.exists(dest_dir):
            os.mkdir(dest_dir)

        # TODO: This is a DRY violation from main.js. We should
        # really move the common logic/names to cuddlefish.packaging.
        # FIX: don't crash on packages that ship without a README.md
        # (matches the guard used by newer revisions of this code)
        src_readme = os.path.join(src_dir, "README.md")
        if os.path.exists(src_readme):
            shutil.copyfile(src_readme,
                            os.path.join(dest_dir, "README.md"))

        docs_src_dir = os.path.join(src_dir, "docs")
        docs_dest_dir = os.path.join(dest_dir, "docs")
        if not os.path.exists(docs_dest_dir):
            os.mkdir(docs_dest_dir)
        for (dirpath, dirnames, filenames) in os.walk(docs_src_dir):
            assert dirpath.startswith(docs_src_dir)
            relpath = dirpath[len(docs_src_dir) + 1:]
            for dirname in dirnames:
                dest_path = os.path.join(docs_dest_dir, relpath, dirname)
                if not os.path.exists(dest_path):
                    os.mkdir(dest_path)
            for filename in filenames:
                if filename.endswith("~"):
                    continue
                src_path = os.path.join(dirpath, filename)
                dest_path = os.path.join(docs_dest_dir, relpath, filename)
                shutil.copyfile(src_path, dest_path)
                if filename.endswith(".md"):
                    # parse and JSONify the API docs
                    # FIX: close the source file instead of leaking it
                    md_file = open(src_path, 'r')
                    try:
                        docs_md = md_file.read()
                    finally:
                        md_file.close()
                    docs_parsed = list(apiparser.parse_hunks(docs_md))
                    docs_json = json.dumps(docs_parsed)
                    open(dest_path + ".json", "w").write(docs_json)

    # finally, build a tarfile out of everything
    tgz = tarfile.open(tgz_filename, 'w:gz')
    try:
        # BUG FIX: add the staging dir by its real path; the original
        # tgz.add('jetpack-sdk-docs', ...) only worked when the current
        # working directory happened to be env_root
        tgz.add(staging_dir, 'jetpack-sdk-docs')
    finally:
        tgz.close()
    shutil.rmtree(staging_dir)
def generate_static_docs(env_root, tgz_filename):
    """Stage a static copy of the SDK docs (with parsed ``.md`` API
    docs JSONified alongside) and archive it as *tgz_filename*.

    :param env_root: root of the SDK checkout
    :param tgz_filename: output ``.tar.gz`` path
    """
    server = Server(env_root=env_root,
                    task_queue=None,
                    expose_privileged_api=False)

    staging_dir = os.path.join(env_root, "addon-sdk-docs")
    if os.path.exists(staging_dir):
        shutil.rmtree(staging_dir)

    # first, copy static-files
    shutil.copytree(server.root, staging_dir)
    # py2.5 doesn't have ignore=, so we delete tempfiles afterwards. If we
    # required >=py2.6, we could use ignore=shutil.ignore_patterns("*~")
    for (dirpath, dirnames, filenames) in os.walk(staging_dir):
        for n in filenames:
            if n.endswith("~"):
                os.unlink(os.path.join(dirpath, n))

    # then copy docs from each package
    os.mkdir(os.path.join(staging_dir, "packages"))

    pkg_cfg = server.build_pkg_cfg()

    # starting with the (generated) index file
    index = json.dumps(server.build_pkg_index(pkg_cfg))
    index_path = os.path.join(staging_dir, "packages", 'index.json')
    open(index_path, 'w').write(index)

    # and every doc-like thing in the package
    for pkg_name, pkg in pkg_cfg['packages'].items():
        src_dir = pkg.root_dir
        dest_dir = os.path.join(staging_dir, "packages", pkg_name)
        if not os.path.exists(dest_dir):
            os.mkdir(dest_dir)

        # TODO: This is a DRY violation from main.js. We should
        # really move the common logic/names to cuddlefish.packaging.
        src_readme = os.path.join(src_dir, "README.md")
        if os.path.exists(src_readme):
            shutil.copyfile(src_readme,
                            os.path.join(dest_dir, "README.md"))

        docs_src_dir = os.path.join(src_dir, "docs")
        docs_dest_dir = os.path.join(dest_dir, "docs")
        if not os.path.exists(docs_dest_dir):
            os.mkdir(docs_dest_dir)
        for (dirpath, dirnames, filenames) in os.walk(docs_src_dir):
            assert dirpath.startswith(docs_src_dir)
            relpath = dirpath[len(docs_src_dir)+1:]
            for dirname in dirnames:
                dest_path = os.path.join(docs_dest_dir, relpath, dirname)
                if not os.path.exists(dest_path):
                    os.mkdir(dest_path)
            for filename in filenames:
                if filename.endswith("~"):
                    continue
                src_path = os.path.join(dirpath, filename)
                dest_path = os.path.join(docs_dest_dir, relpath, filename)
                shutil.copyfile(src_path, dest_path)
                if filename.endswith(".md"):
                    # parse and JSONify the API docs
                    # FIX: close the source file instead of leaking it
                    md_file = open(src_path, 'r')
                    try:
                        docs_md = md_file.read()
                    finally:
                        md_file.close()
                    docs_parsed = list(apiparser.parse_hunks(docs_md))
                    docs_json = json.dumps(docs_parsed)
                    open(dest_path + ".json", "w").write(docs_json)

    # finally, build a tarfile out of everything
    tgz = tarfile.open(tgz_filename, 'w:gz')
    try:
        # BUG FIX: add the staging dir by its real path; the original
        # tgz.add('addon-sdk-docs', ...) only worked when the current
        # working directory happened to be env_root
        tgz.add(staging_dir, 'addon-sdk-docs')
    finally:
        tgz.close()
    shutil.rmtree(staging_dir)