def test_frontpage_staticsite(self):
    """A frontpage generated with staticsite=True must use relative
    hrefs for the site header, the per-repo nav entries and the CSS
    resources (no absolute URLs)."""
    repo_a = staticmockclass(datadir=self.tempdir)
    repo_b = staticmockclass2(datadir=self.tempdir)
    outfile = self.tempdir + '/index.html'
    manager.makeresources([repo_a, repo_b], self.tempdir + '/rsrc')
    manager.frontpage([repo_a, repo_b], path=outfile, staticsite=True)

    tree = ET.parse(outfile)
    # the site banner links back to the frontpage itself, relatively
    banner = tree.find(".//header/h1/a")
    self.assertEqual(banner.get("href"), 'index.html')
    # one relative TOC link per configured repo, in registration order
    nav_links = tree.findall(".//header/nav/ul/li/a")
    self.assertEqual(nav_links[0].get("href"), 'staticmock/toc/index.html')
    self.assertEqual(nav_links[1].get("href"), 'staticmock2/toc/index.html')
    # stylesheet references point into the generated rsrc/ tree
    stylesheets = tree.findall("head/link[@rel='stylesheet']")
    self.assertRegex(stylesheets[0].get('href'), '^rsrc/css')
def test_frontpage_staticsite(self):
    """Static-site frontpage generation should emit relative links for
    the navbar brand, the per-repo nav entries and the last stylesheet
    (the one produced by makeresources)."""
    first_repo = staticmockclass(datadir=self.tempdir)
    second_repo = staticmockclass2(datadir=self.tempdir)
    outfile = self.tempdir + '/index.html'
    Resources([first_repo, second_repo], self.tempdir + '/rsrc').make()
    manager.frontpage([first_repo, second_repo], path=outfile,
                      staticsite=True, removeinvalidlinks=False)

    tree = ET.parse(outfile)
    # navbar brand links back to the frontpage with a relative href
    brand = tree.find(".//div[@class='navbar-header']/a")
    self.assertEqual(brand.get("href"), 'index.html')
    # each repo contributes a relative TOC link, in registration order
    nav_links = tree.findall(".//ul[@class='nav navbar-nav']/li/a")
    self.assertEqual(nav_links[0].get("href"), 'staticmock/toc/index.html')
    self.assertEqual(nav_links[1].get("href"), 'staticmock2/toc/index.html')
    # the site-specific stylesheet comes last and lives under rsrc/css
    stylesheets = tree.findall("head/link[@rel='stylesheet']")
    self.assertRegex(stylesheets[-1].get('href'), '^rsrc/css')
def test_frontpage_staticsite(self):
    """Verify relative linking on a staticsite frontpage: navbar brand,
    repo nav entries and the generated stylesheet reference."""
    repos = [staticmockclass(datadir=self.tempdir),
             staticmockclass2(datadir=self.tempdir)]
    outfile = self.tempdir + '/index.html'
    Resources(repos, self.tempdir + '/rsrc').make()
    manager.frontpage(repos, path=outfile,
                      staticsite=True, removeinvalidlinks=False)

    parsed = ET.parse(outfile)
    # brand link must be relative, not an absolute site URL
    self.assertEqual(
        parsed.find(".//div[@class='navbar-header']/a").get("href"),
        'index.html')
    # per-repo TOC links, relative, in the order the repos were given
    toc_links = parsed.findall(".//ul[@class='nav navbar-nav']/li/a")
    self.assertEqual(toc_links[0].get("href"), 'staticmock/toc/index.html')
    self.assertEqual(toc_links[1].get("href"), 'staticmock2/toc/index.html')
    # final stylesheet is the generated one under rsrc/css
    links = parsed.findall("head/link[@rel='stylesheet']")
    self.assertRegex(links[-1].get('href'), '^rsrc/css')
# Checks the non-static frontpage: header href is an absolute site URL, and
# each repo gets a section-wrapper div (id, description, document count).
# NOTE(review): part of this line is redacted ('http://*****:*****@class=...'),
# hiding the expected header URL and at least one statement (presumably the
# tree.findall(...) that binds `divs`) — left byte-identical; reconstruct from
# the upstream source before editing. TODO confirm the redacted span.
def test_frontpage(self): test = staticmockclass() test2 = staticmockclass2() outfile = self.tempdir + '/index.html' Resources([test, test2], self.tempdir + '/rsrc').make() res = manager.frontpage([test, test2], path=outfile) self.assertTrue(res) tree = ET.parse(outfile) header = tree.find(".//div[@class='navbar-header']/a") self.assertEqual(header.get("href"), 'http://*****:*****@class='section-wrapper']") self.assertEqual(2, len(list(divs))) self.assertEqual("staticmock", divs[0].get("id")) self.assertEqual("staticmock2", divs[1].get("id")) self.assertIn("Handles foaf:Document", divs[0].find("p").text) self.assertIn("Contains 3 published documents", divs[0].find("p").text)
# Duplicate variant of test_frontpage (whitespace-only differences from the
# other copy in this file): non-static frontpage with absolute header URL and
# per-repo section-wrapper divs.
# NOTE(review): contains a redacted span ('http://*****:*****@class=...') that
# hides the expected URL and the statement binding `divs` — left byte-identical;
# do not restyle without the unredacted source. TODO confirm the redacted span.
def test_frontpage(self): test = staticmockclass() test2 = staticmockclass2() outfile = self.tempdir+'/index.html' Resources([test,test2], self.tempdir+'/rsrc').make() res = manager.frontpage([test,test2], path=outfile) self.assertTrue(res) tree = ET.parse(outfile) header = tree.find(".//div[@class='navbar-header']/a") self.assertEqual(header.get("href"), 'http://*****:*****@class='section-wrapper']") self.assertEqual(2, len(list(divs))) self.assertEqual("staticmock", divs[0].get("id")) self.assertEqual("staticmock2", divs[1].get("id")) self.assertIn("Handles foaf:Document", divs[0].find("p").text) self.assertIn("Contains 3 published documents", divs[0].find("p").text)
# NOTE(review): collapsed documentation-example chunk. It begins mid-expression
# (the tail of a return-dict inside what the marker names frontpage_content),
# so the enclosing scope/indentation is not visible here. After the
# "# end frontpage_content" marker it is a module-level end-to-end RFCs
# pipeline: download (max 5) -> parse -> setup/relate/teardown ->
# makeresources -> generate -> toc/news -> frontpage -> cleanup of "data".
# This variant passes d.get_default_options() straight to LayeredConfig
# (older API; a sibling chunk in this file wraps it in Defaults(...)).
# Left byte-identical: statement boundaries are ambiguous in collapsed form.
'items': items, 'doccount': len(list(self.store.list_basefiles_for("_postgenerate")))}) # end frontpage_content from ferenda import manager, LayeredConfig import sys manager.setup_logger("DEBUG") d = RFCs(downloadmax=5) d.download() for basefile in d.store.list_basefiles_for("parse"): d.parse(basefile) RFCs.setup("relate", LayeredConfig(d.get_default_options())) for basefile in d.store.list_basefiles_for("relate"): d.relate(basefile) RFCs.teardown("relate", LayeredConfig(d.get_default_options())) manager.makeresources([d]) for basefile in d.store.list_basefiles_for("generate"): d.generate(basefile) d.toc() d.news() manager.frontpage([d]) shutil.rmtree("data") return_value = True
# Documentation example: run the full W3CStandards pipeline, then clean up.
# The "# begin"/"# end" markers delimit the snippets extracted into the docs.

# begin parse-all
import logging
from w3cstandards import W3CStandards

# client code is responsible for setting the effective log level -- ferenda
# just emits log messages, and depends on the caller to setup the logging
# subsystem in an appropriate way
logging.getLogger().setLevel(logging.INFO)

repo = W3CStandards()
for basefile in repo.store.list_basefiles_for("parse"):
    # You might want to try/catch the exception
    # ferenda.errors.ParseError or any of its children here
    repo.parse(basefile)
# end parse-all

# begin final-commands
from ferenda import manager
from w3cstandards import W3CStandards

repo = W3CStandards()

# relate every parsed document before generating anything
for basefile in repo.store.list_basefiles_for("relate"):
    repo.relate(basefile)

manager.makeresources([repo],
                      sitename="Standards",
                      sitedescription="W3C standards, in a new form")

# produce browsable HTML, then the site-level navigation artifacts
for basefile in repo.store.list_basefiles_for("generate"):
    repo.generate(basefile)
repo.toc()
repo.news()
manager.frontpage([repo])
# end final-commands

# remove the generated data so the example leaves no residue behind
shutil.rmtree(repo.config.datadir)
return_value = True
# NOTE(review): collapsed documentation-example chunk. It begins mid-loop-body
# (docrepo.relate(...) belongs to for-loops opened before this chunk), so the
# correct indentation of the first statement is not visible here. The rest is
# module-level: makeresources -> per-repo generate/toc/news -> frontpage;
# the runserver(...) call is intentionally commented out in the example.
# Left byte-identical: reformatting would require guessing the enclosing
# loops' nesting depth. A near-duplicate of this chunk (spacing-only diffs)
# also appears in this file.
docrepo.relate(basefile, docrepos) # Prepare various assets for web site navigation makeresources(docrepos, resourcedir="netstandards/exampledata/rsrc", sitename="Netstandards", sitedescription="A repository of internet standard documents") # Relate for all repos must run before generate for any repo for docrepo in docrepos: # Generate static HTML files from the parsed documents, # with back- and forward links between them, etc. for basefile in docrepo.store.list_basefiles_for("generate"): docrepo.generate(basefile) # Generate a table of contents of all available documents docrepo.toc() # Generate feeds of new and updated documents, in HTML and Atom flavors docrepo.news() # Create a frontpage for the entire site frontpage(docrepos, path="netstandards/exampledata/index.html") # Start WSGI app at http://localhost:8000/ with navigation, # document viewing, search and API # runserver(docrepos, port=8000, documentroot="netstandards/exampledata") # end example shutil.rmtree("netstandards") return_value = True
# NOTE(review): collapsed documentation-example chunk, a near-duplicate of the
# other netstandards chunk in this file (differs only in spacing around the
# frontpage(...) arguments). Begins mid-loop-body, so enclosing indentation is
# not visible; left byte-identical. Pipeline: relate (tail) -> makeresources ->
# per-repo generate/toc/news -> frontpage; runserver is commented out.
docrepo.relate(basefile, docrepos) # Prepare various assets for web site navigation makeresources(docrepos, resourcedir="netstandards/exampledata/rsrc", sitename="Netstandards", sitedescription="A repository of internet standard documents") # Relate for all repos must run before generate for any repo for docrepo in docrepos: # Generate static HTML files from the parsed documents, # with back- and forward links between them, etc. for basefile in docrepo.store.list_basefiles_for("generate"): docrepo.generate(basefile) # Generate a table of contents of all available documents docrepo.toc() # Generate feeds of new and updated documents, in HTML and Atom flavors docrepo.news() # Create a frontpage for the entire site frontpage(docrepos,path="netstandards/exampledata/index.html") # Start WSGI app at http://localhost:8000/ with navigation, # document viewing, search and API # runserver(docrepos, port=8000, documentroot="netstandards/exampledata") # end example shutil.rmtree("netstandards") return_value = True
# NOTE(review): collapsed documentation-example chunk; begins mid-expression
# (tail of the frontpage_content return-dict), so the enclosing scope is not
# visible. Module-level RFCs pipeline after the marker: download (max 5) ->
# parse -> setup/relate/teardown -> makeresources -> generate -> toc/news ->
# frontpage -> cleanup of "data". This is the newer API variant of the other
# RFCs chunk in this file: it imports LayeredConfig/Defaults from the
# standalone layeredconfig package and wraps get_default_options() in
# Defaults(...). Left byte-identical: statement boundaries are ambiguous in
# collapsed form.
items, 'doccount': len(list(self.store.list_basefiles_for("_postgenerate"))) }) # end frontpage_content from ferenda import manager from layeredconfig import LayeredConfig, Defaults import sys manager.setup_logger("DEBUG") d = RFCs(downloadmax=5) d.download() for basefile in d.store.list_basefiles_for("parse"): d.parse(basefile) RFCs.setup("relate", LayeredConfig(Defaults(d.get_default_options()))) for basefile in d.store.list_basefiles_for("relate"): d.relate(basefile) RFCs.teardown("relate", LayeredConfig(Defaults(d.get_default_options()))) manager.makeresources([d]) for basefile in d.store.list_basefiles_for("generate"): d.generate(basefile) d.toc() d.news() manager.frontpage([d]) shutil.rmtree("data") return_value = True