class _(TestCase):
    def setUp(self):
        TestCase.setUp(self)
        from random import randint
        self.port = randint(10000, 20000)
        from lib.WsgiRunner import PasteThread
        self.pasteThread = PasteThread(RecordApp(), self.port, timeout=3)
        self.pasteThread.start()
        import time
        time.sleep(1)

    def testGet(self):
        self.assertTrue(self.pasteThread.isAlive())
        from httplib import HTTPConnection
        http_connection = HTTPConnection("localhost", port=self.port)
        http_connection.request("GET", "/api/Record")
        response = http_connection.getresponse()
        debug(response.read())
        self.assertIn(response.status, (200, 404, 500))

    def testGvizDataTable(self):
        session = SqlAlchemySessionFactory().createSqlAlchemySession()
        import gviz_api
        data_table = Record.getGvizDataTable()
        self.assertIsInstance(data_table, gviz_api.DataTable)
        import re
        r = re.compile(r"^google\.visualization\.Query\.setResponse\({.*}\);$")
        m = r.match(data_table.ToResponse())
        self.assertIsNotNone(m)
        session.close()

    def tearDown(self):
        TestCase.tearDown(self)
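
# Sketch (not part of the application code): the testGvizDataTable regex above
# relies on gviz_api.DataTable.ToResponse() wrapping the table JSON in a
# "google.visualization.Query.setResponse({...});" envelope. A minimal,
# self-contained illustration with hypothetical columns (id, url) that are not
# taken from the real Record model:
def _gviz_response_sketch():
    import gviz_api
    data_table = gviz_api.DataTable({"id": ("number", "Id"), "url": ("string", "Url")})
    data_table.LoadData([{"id": 1, "url": "http://example.com/"}])
    response = data_table.ToResponse()
    assert response.startswith("google.visualization.Query.setResponse(")
    assert response.endswith(");")
    return response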
class _TestApiCrawl(TestCase):
    def setUp(self):
        TestCase.setUp(self)
        from random import randint
        self.port = randint(10000, 20000)
        from lib.WsgiRunner import PasteThread
        self.pasteThread = PasteThread(CrawlApp(), self.port, timeout=5)
        self.pasteThread.start()
        import time
        time.sleep(1)

    def testGet(self):
        self.assertTrue(self.pasteThread.isAlive())
        from httplib import HTTPConnection
        http_connection = HTTPConnection("localhost", port=self.port)
        http_connection.request("GET", "/api/Crawl")
        response = http_connection.getresponse()
        self.assertIn(response.status, (200, 404, 500))

    def test2(self):
        session = Session()
        info(Crawl.getGvizDataTable(session))
        session.close()

    def tearDown(self):
        TestCase.tearDown(self)
class _TestApp(unittest.TestCase):
    def setUp(self):
        unittest.TestCase.setUp(self)
        from random import randint
        self.port = randint(10000, 20000)
        from lib.WsgiRunner import PasteThread
        self.pasteThread = PasteThread(CrawlApp(), self.port, timeout=5)
        # the server object exists and reports running before the thread starts
        self.assertIsNotNone(self.pasteThread.server)
        self.assertFalse(self.pasteThread.isAlive())
        self.assertTrue(self.pasteThread.server.running)
        self.pasteThread.start()
        self.assertTrue(self.pasteThread.isAlive())
        self.assertTrue(self.pasteThread.server.running)
        import time
        time.sleep(1)  # give the server thread a moment to begin handling requests

    def testApiCrawl(self):
        from httplib import HTTPConnection
        http_connection = HTTPConnection("localhost", port=self.port)
        http_connection.request("GET", "/api/Crawl")
        response = http_connection.getresponse()
        info("reading body, status %d" % response.status)
        body = response.read()
        info(body)
        self.assertIn(response.status, (200, 404, 500))
        self.assertGreater(len(body), 1)
        info("closing http connection")
        http_connection.close()

    def tearDown(self):
        info("shutting down PasteThread")
        self.pasteThread.shutdown()
        info("joining")
        self.pasteThread.join()
        unittest.TestCase.tearDown(self)
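
# Hypothetical sketch (an assumption, not the real lib.WsgiRunner.PasteThread):
# the tests above only rely on a thread that owns a WSGI server (.server),
# supports start()/isAlive()/join() from threading.Thread, and can be stopped
# via shutdown(). The stand-in below uses the standard-library wsgiref server
# instead of Paste, so details such as the timeout argument and the
# server.running flag are not reproduced.
import threading
from wsgiref.simple_server import make_server

class _WsgiServerThreadSketch(threading.Thread):
    def __init__(self, app, port):
        threading.Thread.__init__(self)
        self.daemon = True
        self.server = make_server("localhost", port, app)  # binds the port immediately

    def run(self):
        # blocks until shutdown() is called from another thread
        self.server.serve_forever()

    def shutdown(self):
        self.server.shutdown()
        self.server.server_close()

# Typical use mirrors the tests above:
#   thread = _WsgiServerThreadSketch(CrawlApp(), 10523)
#   thread.start()
#   ...issue HTTP requests against localhost:10523...
#   thread.shutdown()
#   thread.join()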
import unittest

from api.Crawl import CrawlApp

if __name__ == "__main__":
    # serve static assets and the API apps behind a single cascaded WSGI app
    from paste.urlparser import StaticURLParser
    html_app = StaticURLParser("html")
    css_app = StaticURLParser("css")
    js_app = StaticURLParser("js")
    from paste.cascade import Cascade
    cascaded_app = Cascade([html_app, css_app, js_app, _EnvironmentApp(), RecordApp(), CrawlApp()])
    while True:
        from lib.WsgiRunner import PasteThread
        paste_thread = PasteThread(cascaded_app, 10523, timeout=20)
        debug("starting PasteThread")
        paste_thread.start()
        debug("waiting for PasteThread to stop")
        try:
            paste_thread.join()
        except KeyError, e:
            pass
        info("restarting PasteThread")
        import webbrowser
        webbrowser.open("http://localhost:10523/Record.html", autoraise=1)
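
# Note on the Cascade above (a sketch, not project code): paste.cascade.Cascade
# tries the listed apps in order and falls through to the next one whenever an
# app answers with a 404, which is why the static html/css/js apps can sit in
# front of the API apps. A minimal illustration with two throwaway WSGI apps:
def _always_404(environ, start_response):
    start_response("404 Not Found", [("Content-Type", "text/plain")])
    return ["not here"]

def _hello(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return ["hello"]

def _cascade_sketch():
    from paste.cascade import Cascade
    # requests hit _always_404 first, fall through on its 404, and reach _hello
    return Cascade([_always_404, _hello])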