def test_logs_inside(app, client):
    """The /logs listing renders the full (non-simple) UI at both the project and spider level."""
    cases = [(PROJECT, None), (PROJECT, SPIDER)]
    with app.test_request_context():
        for project, spider in cases:
            title = 'Directory listing for /logs/%s/%s' % (project, spider or '')
            response = client.get(url_for('logs', node=1, project=project, spider=spider))
            page = get_text(response)
            assert title in page and not is_simple_ui(response)
def test_items(app, client):
    """The /items listing (simple UI) shows the directory index, or Scrapyd's 'No Such Resource'."""
    title = 'Directory listing for /items/'
    with app.test_request_context():
        response = client.get(url_for('items', node=1, ui='simple'))
        page = get_text(response)
        # Scrapyd may not have an items dir yet, so either outcome is acceptable.
        assert (title in page or 'No Such Resource' in page) and is_simple_ui(response)
def test_listprojects(app, client):
    """After deploying a project, the manage page lists uploaded projects in the full UI."""
    # A demo.egg deploy works equally well here; the zip form is the one exercised.
    upload_file_deploy(app, client, filename='demo.zip', project=PROJECT, redirect_project=PROJECT)
    title = 'Get the list of projects uploaded'
    with app.test_request_context():
        response = client.get(url_for('manage', node=1))
        assert title in get_text(response) and not is_simple_ui(response)
def test_parse_uploaded_demo_txt(app, client):
    """Parsing the bundled demo.txt log shows the stats section in the simple UI."""
    with app.test_request_context():
        response = client.get(url_for('parse.uploaded', node=1, filename='demo.txt', ui='simple'))
        assert 'Stats collection' in get_text(response) and is_simple_ui(response)
def test_log_stats(app, client):
    """The per-job stats page renders in the simple UI for a known job."""
    with app.test_request_context():
        stats_url = url_for('log', node=1, opt='stats', project=PROJECT, spider=SPIDER,
                            job=jobid, ui='simple')
        response = client.get(stats_url)
        assert 'Stats collection' in get_text(response) and is_simple_ui(response)
def test_log_utf8(app, client):
    """The per-job utf8 log page renders in the simple UI for a known job."""
    with app.test_request_context():
        utf8_url = url_for('log', node=1, opt='utf8', project=PROJECT, spider=SPIDER,
                           job=jobid, ui='simple')
        response = client.get(utf8_url)
        assert 'utf8 - ScrapydWeb' in get_text(response) and is_simple_ui(response)
def test_log_utf8_stats(app, client):
    """End-to-end: deploy, start a real job, then check its utf8 and stats pages in the full UI."""
    upload_file_deploy(app, client, filename='demo.egg', project=PROJECT, redirect_project=PROJECT)
    with app.test_request_context():
        # Kick off a crawl and capture the job id Scrapyd assigns.
        start_url = url_for('api', node=1, opt='start', project=PROJECT, version_spider_job=SPIDER)
        js = load_json(client.get(start_url))
        jobid = js['jobid']
        sleep()

        # UTF8 page
        utf8_url = url_for('log', node=1, opt='utf8', project=PROJECT, spider=SPIDER, job=jobid)
        response = client.get(utf8_url)
        assert 'utf8 - ScrapydWeb' in get_text(response) and not is_simple_ui(response)

        # Stop the job before inspecting its stats so it does not run on.
        client.get(url_for('api', node=1, opt='forcestop', project=PROJECT, version_spider_job=jobid))

        # Stats page
        stats_url = url_for('log', node=1, opt='stats', project=PROJECT, spider=SPIDER, job=jobid)
        response = client.get(stats_url)
        assert 'Stats collection' in get_text(response) and not is_simple_ui(response)
def test_parse_upload(app, client):
    """The log-upload form renders in the simple UI."""
    title = 'Upload a scrapy log file to parse'
    with app.test_request_context():
        response = client.get(url_for('parse.upload', node=1, ui='simple'))
        assert title in get_text(response) and is_simple_ui(response)
def test_logs(app, client):
    """The top-level /logs listing renders in the simple UI."""
    title = 'Directory listing for /logs/'
    with app.test_request_context():
        response = client.get(url_for('logs', node=1, ui='simple'))
        assert title in get_text(response) and is_simple_ui(response)
def test_dashboard(app, client):
    """The dashboard's simple UI offers a link back to the desktop version."""
    with app.test_request_context():
        response = client.get(url_for('dashboard', node=1, ui='simple'))
        assert 'Visit desktop version' in get_text(response) and is_simple_ui(response)