Example 1
def test_populate_areas():
	from webpages.populate import populate, save_expander
	
	populate(save_expander,prep_site_config("w9",**{"browser": "desktop"}))
	assert exists(join(pages_test_root,"output","index.html"))

	soup = get_soup(join(pages_test_root,"output","index.html"))
	a1 = soup.find(id="a1")
	a2 = soup.find(id="a2")
	config = eval_config_script(soup("script")[2].string)

	# The elements with parts content and matter
	assert a1.contents[0].strip() == "top bit"
	assert a1.contents[1].string.strip() == "section one"
	assert a1.contents[3].string.strip() == "section two"
	assert a1["class"].split() == [u"splash-area-inactive", u"upper-area-inactive", u"lower-area-inactive"]
	assert soup.find("section",id="s1")["class"].split() == [u"in-splash-area", u"in-splash-order-0", u"in-upper-area", u"in-upper-order-0", u"in-upper-order-last"]
	assert soup.find("section",id="s1")["role"] == "deck"
	assert soup.find("section",id="s2")["class"].split() == [u"in-splash-area", u"in-splash-order-1", u"in-lower-area", u"in-lower-order-0", u"in-lower-order-last", u"in-splash-order-last"]

	assert config["a1"] == {"area-names": ["splash","upper", "lower"], "charset": "utf-8", "layouter": "area-stage"}
	assert config["s2"] == {"area-names": ["splash","lower"], "charset": "utf-8", "laidout": "area-member"}
	assert config["s1"] == {"area-names": ["splash","upper"], "charset": "utf-8", "laidout": "area-member"}

	# The elements without parts, simply inline in HTML
	assert soup.find(id="a2")

	assert config["s4"] == {"area-names": ["second"], "laidout": "area-member"}
	assert config["s3"] == {"area-names": ["first"], "laidout": "area-member"}
	assert config["a2"] == {"area-names": ["first", "second"], "layouter": "area-stage"}
Example 2
def test_populate_assets():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w7",**{"browser": "desktop"})

	populate(save_expander,config)
	assert exists(join(pages_test_root,"output","index.html"))
	assert exists(join(pages_test_root,"output","public","js","html5.js"))
	assert exists(join(pages_test_root,"output","public","css","test.css"))
Example 3
def test_populate_exclude_published():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w6")

	populate(save_expander,config)

	assert not exists(join(pages_test_root,"output","desktop","not-published","index.html"))
	assert not exists(join(pages_test_root,"output","desktop","css","unpublished-test.css"))
	assert not exists(join(pages_test_root,"output","desktop","js","unpublished.js"))
	assert not exists(join(pages_test_root,"output","desktop","internal","page1","index.html"))
Example 4
def test_populate_stateful():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w8",**{"browser": "desktop"})

	populate(save_expander,config)

	soup = get_soup(pages_test_root,"output","index.html")

	assert soup("article",id="a1")[0].contents[0].strip() == "myarticle"
	assert soup("article",id="a1")[0].contents[1].string.strip() == "section one"
	assert soup("script")[2]["type"] == "application/config"
	assert soup("script")[2].string.strip() == """\
Example 5
def test_populate_scss():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w5")

	populate(save_expander,config)
	_assert_content_re(join(pages_test_root,"output","desktop","css","test.css"),r"\.test\{")
	_assert_content_re(join(pages_test_root,"output","tablet","css","test.css"),r"\.test\{")
	_assert_content_re(join(pages_test_root,"output","pocket","css","test.css"),r"\.test2\{")

	_assert_content_re(join(pages_test_root,"output","desktop","css","desktop.css"),r"\.desktop2\{")
	_assert_content_re(join(pages_test_root,"output","tablet","css","desktop.css"),r"\.desktop\{")
	_assert_content_re(join(pages_test_root,"output","pocket","css","desktop.css"),r"\.desktop\{")
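
`_assert_content_re` is another private helper of the test module. A plausible sketch, assuming it simply checks that a file's text matches a regular expression, would be:

# Hypothetical sketch of _assert_content_re -- assumes a plain regex search over the file contents.
import re

def _assert_content_re(path, pattern):
	with open(path) as f:
		content = f.read()
	assert re.search(pattern, content), "%r not found in %s" % (pattern, path)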
Example 6
def test_populate_jquery():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w2")

	populate(save_expander,config)
	assert exists(join(pages_test_root,"output","desktop","js","jquery-1.5.1.js"))
	jqsize = getsize(join(pages_test_root,"w2","_libs","jquery-1.5.1.js"))
	assert getsize(join(pages_test_root,"output","desktop","js","jquery-1.5.1.js")) == jqsize
	assert getsize(join(pages_test_root,"output","desktop","js","jquery.js")) == jqsize
	jqminsize = getsize(join(pages_test_root,"w2","_libs","jquery-1.5.1.min.js"))
	assert getsize(join(pages_test_root,"output","desktop","js","jquery-1.5.1.min.js")) == jqminsize
	assert getsize(join(pages_test_root,"output","desktop","js","jquery.min.js")) == jqminsize
Example 7
def test_populate_trackers():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w10",**{"browser": "desktop"})

	populate(save_expander,config)
	assert exists(join(pages_test_root,"output","index.html"))

	soup = get_soup(pages_test_root,"output","index.html")

	# print soup.find_all(attrs={ "tracker-parent":re.compile(r".*") })

	assert soup.find("article",id="a1").contents[0].strip() == "top bit"
	assert soup.find("section",id="s1").string.strip() == "section one"
	trackerTwo = soup.article.find_all("div")[0]
	sectionTwo = soup.article("section")[1]
	trackerThree = soup.article.find_all("div")[1]
	sectionThree = soup.article("section")[2]
	trackerFour = soup.article.find(id="other-trackers").div
	sectionFour = soup.article("section")[3]
	s2id = sectionTwo["id"].encode("utf-8")
	s2trk = trackerTwo["id"].encode("utf-8")
	s3id = sectionThree["id"].encode("utf-8")
	s3trk = trackerThree["id"].encode("utf-8")
	s4id = sectionFour["id"].encode("utf-8")
	s4trk = trackerFour["id"].encode("utf-8")
	assert s2id is not None
	assert s3id is not None

	soup("script")[2].src == "application.js"

	config = eval_config_script(soup("script")[-1].string)
	assert config["a1"] == {"area-names": ["upper", "lower"], "charset": "utf-8", "layouter": "area-stage"}
	assert config["s1"] == {"area-names": ["upper"], "charset": "utf-8", "laidout": "area-member"}
	assert config[s2id] == {"driven-by": s2trk, "tracker-driven": ["left", "top"]}
	assert config[s3id] == {"driven-by": s3trk, "tracker-driven": ["left", "top"], 
		# "tracker-parent": "other-trackers",
		"area-names": ["lower"], 
		#"charset": "utf-8", 
		"laidout": "area-member" }

	# assert soup("section",id="s2")[0]["class"] == "in-lower-area in-lower-order-0"
	assert trackerTwo["class"].split() == [u"tracker", u"section-tracker"]
	assert trackerThree["class"].split() == [u"tracker", u"section-tracker", u"in-lower-area",u"in-lower-order-0"]

	# print sectionFour, config[s4id]
	assert "tracker-parent" not in trackerFour
	assert config[s4id] == {"driven-by": s4trk, "tracker-driven": ["left", "top"], 
		"tracker-parent": "other-trackers",
		"area-names": ["upper"], 
		"laidout": "area-member" }
Example 8
def test_populate_desktop_browser():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w1",**{"browser": "desktop"})

	populate(save_expander,config)
	assert exists(join(pages_test_root,"output","js","html5.js"))
	h5size = getsize(join(pages_test_root,"resources","html5-patch.js"))
	disclsize = getsize(join(pages_test_root,"resources","lead-disclaimer.js"))

	assert getsize(join(pages_test_root,"output","js","html5.js")) == disclsize + h5size

	soup = get_soup(pages_test_root,"output","bodypart","index.html")
	assert soup.html == soup.find(attrs={ "class": "no-js desktop" })
Example 9
def test_populate():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w1")

	populate(save_expander,config)
	assert exists(join(pages_test_root,"output","desktop","js","html5.js"))
	assert exists(join(pages_test_root,"output","pocket","js","html5.js"))
	assert exists(join(pages_test_root,"output","tablet","js","html5.js"))
	assert not exists(join(pages_test_root,"output","desktop","mymodule"))
	assert not exists(join(pages_test_root,"output","pocket","mymodule"))
	assert not exists(join(pages_test_root,"output","tablet","mymodule"))

	soup = get_soup(pages_test_root,"output","desktop","partless","index.html")
	assert soup.head == soup.find(id="h")
	assert soup.body == soup.find(id="b")
	assert soup.body.string.strip() == "body comes here"
	
	soup = get_soup(pages_test_root,"output","desktop","bodypart","index.html")
	assert soup.html == soup.find(attrs={ "class": "no-js desktop" })
	assert soup.html["class"].split() == [u"no-js",u"desktop"]
	assert soup.head == soup.find(id="h")
	assert soup.body == soup.find(id="b")
	assert soup.body.string.strip() == "in a body tag"
	soup_tablet = get_soup(pages_test_root,"output","tablet","bodypart","index.html")
	assert soup_tablet.html["class"].split() == [u"no-js",u"tablet"]
	soup_pocket = get_soup(pages_test_root,"output","pocket","bodypart","index.html")
	assert soup_pocket.html["class"].split() == [u"no-js",u"pocket"]

	soup = get_soup(pages_test_root,"output","desktop","mixinbody","index.html")
	assert soup.html == soup.find(attrs={ "class": "no-js desktop" })
	assert soup.head == soup.find(id="h")
	assert soup.body == soup.find(id="b")
	#TODO assert soup.find(attrs={ "src":"extra.js" })
	
	soup = get_soup(pages_test_root,"output","desktop","articlepart","index.html")
	assert soup.html == soup.find(attrs={ "class": "no-js desktop" })
	assert soup.head == soup.find(id="h")
	assert soup.body == soup.find(id="b")
	assert soup.article == soup.find(id="a")
	assert soup.article.h2.string.strip() == "Article"
	assert soup.article.p.string.strip() == "here is the article"

	pg_size = getsize(join(pages_test_root,"w1","assets","horizontal-rule.gif"))
	assert getsize(join(pages_test_root,"output","desktop","assets","horizontal-rule.gif")) == pg_size
	pg_size = getsize(join(pages_test_root,"w1","assets","input-background.png"))
	assert getsize(join(pages_test_root,"output","desktop","assets","input-background.png")) == pg_size
	pg_size = getsize(join(pages_test_root,"w1","assets","page-background.jpg"))
	assert getsize(join(pages_test_root,"output","desktop","assets","page-background.jpg")) == pg_size
Example 10
def test_populate_http_fetch():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w3",**{"browser": "desktop"})

	import SimpleHTTPServer
	import SocketServer
	import threading

	PORT = 64321

	class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler):
		def do_GET(self):
			self.send_response(200)
			self.send_header('Content-type',"text/javascript")
			self.end_headers()
			jsf = None
			with open(join(pages_test_root,"resources","html5-patch.js")) as f:
				jsf = f.read()
			self.wfile.write(jsf)

	class TestServer(SocketServer.TCPServer):
		allow_reuse_address = True

	httpd = TestServer(("localhost", PORT), Handler)
	httpd_thread = threading.Thread(target=httpd.serve_forever)
	httpd_thread.setDaemon(True)
	httpd_thread.start()

	populate(save_expander,config)
	h5size = getsize(join(pages_test_root,"resources","html5-patch.js"))
	disclsize = getsize(join(pages_test_root,"resources","lead-disclaimer.js"))
	fofsize = getsize(join(pages_test_root,"resources","fourofour.html"))

	assert exists(join(pages_test_root,"output","js","html5.js"))
	assert getsize(join(pages_test_root,"output","js","html5.js")) == h5size # disclsize
	assert exists(join(pages_test_root,"output","html5.js"))
	assert getsize(join(pages_test_root,"output","html5.js")) == h5size
	assert exists(join(pages_test_root,"output","js","html5-2.js"))
	assert getsize(join(pages_test_root,"output","js","html5-2.js")) == h5size
	assert exists(join(pages_test_root,"output","404","index.html"))
	soup404 = get_soup(pages_test_root,"output","404","index.html")
	assert len(soup404.body.string.strip())+1 == fofsize

	assert exists(join(pages_test_root,"output","js","test.js"))
	assert getsize(join(pages_test_root,"output","js","test.js")) == 0
	assert exists(join(pages_test_root,"output","js","test-fetched.js"))
	assert getsize(join(pages_test_root,"output","js","test.js")) == 0
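
The throwaway server above is written against the Python 2 `SimpleHTTPServer` and `SocketServer` modules. For reference, a Python 3 port (sketched here as an assumption about how it might look, not part of the original suite) would use `http.server` and `socketserver`:

# Python 3 sketch of the same throwaway server (the test above targets Python 2).
import http.server
import socketserver
import threading
from os.path import join

PORT = 64321  # same port as the test above; pages_test_root is assumed from the test module

class Handler(http.server.SimpleHTTPRequestHandler):
	def do_GET(self):
		self.send_response(200)
		self.send_header("Content-type", "text/javascript")
		self.end_headers()
		with open(join(pages_test_root, "resources", "html5-patch.js"), "rb") as f:
			self.wfile.write(f.read())

class TestServer(socketserver.TCPServer):
	allow_reuse_address = True

httpd = TestServer(("localhost", PORT), Handler)
threading.Thread(target=httpd.serve_forever, daemon=True).start()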
Example 11
def test_populate_html_expansion():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w4")

	populate(save_expander,config)
	assert exists(join(pages_test_root,"output","desktop","about","index.html"))
	assert exists(join(pages_test_root,"output","desktop","301.html"))
	assert exists(join(pages_test_root,"output","desktop","404.html"))
	assert exists(join(pages_test_root,"output","desktop","with-ext.html"))

	soup = get_soup(pages_test_root,"output","desktop","about","index.html")
	assert soup.head.find("meta",attrs={ "name":"author" })["content"] == "Henrik Vendelbo"
	assert soup.head.find("meta",attrs={ "name":"description" })["content"] == "Information about the Product"
	assert soup.head.find("title").string.strip() == "About the Product"
	# assert soup.head.find("meta",attrs={ "name":"charset" })["content"] == "utf-8"

	soup = get_soup(pages_test_root,"output","desktop","iso-encoded","index.html")
	# assert soup.head.find("meta",attrs={ "name":"charset" })["content"] == "iso-8859-1"

	soup = get_soup(pages_test_root,"output","desktop","index.html")
	assert soup.script["src"] == "head.js"
	assert soup.body.article.contents[0].string.strip() == "index page"

	assert soup.find(id="article-head")["rel"] == "stylesheet"
	assert soup.find(id="article-head2")["rel"] == "stylesheet"

	soup = get_soup(pages_test_root,"output","desktop","with-head","index.html")
	assert soup.script["src"] == "head.js"
	# assert soup.body.article.contents[0].string.strip() == "index page"

	assert soup.find(id="article-head")["src"] == "one.js"
	assert soup.find(id="article-head2")["src"] == "two.js"

	#TODO test that derived parts head is mixed in

	soup = get_soup(pages_test_root,"output","desktop","IE-markup","index.html")


	soup = get_soup(pages_test_root,"output","desktop","pocketable-doc","index.html")
	soup.find("meta",attrs={ "name":"description"}) == "Web Page Specification for Quality Assurance"
	soup.find("meta",attrs={ "name":"author"}) == "Henrik Vendelbo"
	soup.body.article.string.strip() == "Here is my article"
Example 12
def test_populate_parts():
	from webpages.populate import populate, save_expander
	
	config = prep_site_config("w6")

	populate(save_expander,config)

	soup = get_soup(pages_test_root,"output","desktop","index.html")

	assert soup("article",id="a1")[0].contents[0].strip() == "myarticle"
	assert soup("article",id="a1")[0].contents[1].string.strip() == "section two"
	assert soup("article",id="a4")[0].contents[0].strip() == "myarticle"
	assert soup("article",id="a4")[0].contents[1].string.strip() == "section two"
	assert soup("aside",id="a2")[0].string.strip() == "myaside"
	assert soup("nav",id="n1")[0].string.strip() == "nav1"
	assert soup.find("nav",id="n1")["role"] == "dialog"
	# assert soup("section",id="s1")[0].contents[1].string.strip() == "<h1>header1</h1>"
	
	assert soup("form",id="f1")[0].button.string.strip() == "submit 1"
	assert soup("form",id="f2")[0].button.string.strip() == "submit 1"
	
	assert soup("script",id="conf")[0].string.strip() == 'window.myconf = { "a":"a"};'
	assert "inline-src" not in soup("script",id="conf")[0]