def test_addversion(app, client): data = { 'project': 'fakeproject', 'version': 'fakeversion', 'file': (io.BytesIO(b'my file contents'), "fake.egg") } req_single_scrapyd(app, client, view='deploy.upload', kws=dict(node=1), data=data, ins='activate_egg')
def test_scrapy_cfg(app, client): with app.test_request_context(): for folder, result in SCRAPY_CFG_DICT.items(): data = { 'folder': folder, 'project': PROJECT, 'version': VERSION, } if result: req_single_scrapyd(app, client, view='deploy.upload', kws=dict(node=1), data=data, ins=result) else: req_single_scrapyd(app, client, view='deploy.upload', kws=dict(node=1), data=data, location=url_for('schedule.schedule', node=1, project=PROJECT, version=VERSION))
def test_select_multinode_checkbox(app, client): for view in ['deploy', 'schedule']: req_single_scrapyd(app, client, view=view, kws=dict(node=1), nos='CheckAll / UncheckAll')
def test_auto_packaging_select_option(app, client): ins = [ '(14 projects)', u"var folders = ['ScrapydWeb_demo', 'demo - 副本', 'demo',", "var projects = ['ScrapydWeb_demo', 'demo-copy', 'demo',", '<div>%s<' % cst.PROJECT, u'<div>demo - 副本<', '<div>demo<', '<div>demo_only_scrapy_cfg<' ] nos = ['<div>demo_without_scrapy_cfg<', '<h3>NO projects found'] req_single_scrapyd(app, client, view='deploy', kws=dict(node=1), ins=ins, nos=nos) for project in [cst.PROJECT, 'demo']: with io.open(os.path.join(cst.CWD, 'data/%s/test' % project), 'w', encoding='utf-8') as f: f.write(u'') ins = ['id="folder_selected" value="%s"' % project, 'id="folder_selected_statement">%s<' % project] req_single_scrapyd(app, client, view='deploy', kws=dict(node=1), ins=ins) with io.open(os.path.join(cst.CWD, 'data/demo/test'), 'w', encoding='utf-8') as f: f.write(u'') # SCRAPY_PROJECTS_DIR=os.path.join(cst.CWD, 'data'), app.config['SCRAPY_PROJECTS_DIR'] = os.path.join(cst.CWD, 'not-exist') req_single_scrapyd(app, client, view='deploy', kws=dict(node=1), ins=['(0 projects)', '<h3>No projects found']) app.config['SCRAPY_PROJECTS_DIR'] = os.path.join(cst.CWD, 'data', 'one_project_inside') req_single_scrapyd(app, client, view='deploy', kws=dict(node=1), ins='(1 project)', nos='<h3>NO projects found') app.config['SCRAPY_PROJECTS_DIR'] = '' req_single_scrapyd(app, client, view='deploy', kws=dict(node=1), ins=DEMO_PROJECTS_PATH.replace('\\', '/'), nos='<h3>NO projects found')
def test_node_reports_pass(app, client): with app.test_request_context(): url_report = url_for('log', node=1, opt='report', project='PROJECT_PLACEHOLDER', spider='SPIDER_PLACEHOLDER', job='JOB_PLACEHOLDER') ins = ["url_report: '%s'," % url_report, "start: '", "finish: '"] req(app, client, view='nodereports', kws=dict(node=1), ins=ins) req_single_scrapyd(app, client, view='nodereports', kws=dict(node=1), ins=ins)
def test_history(app, client): req_single_scrapyd( app, client, view='schedule.history', kws=dict(), ins=['run_spider_history.log', ' -d arg1=%s' % metadata['value']])
def test_node_reports_fail(app, client): ins = ['<title>fail - ScrapydWeb</title>', '<h3>status_code: -1</h3>'] req(app, client, view='nodereports', kws=dict(node=2), ins=ins) req_single_scrapyd(app, client, view='nodereports', kws=dict(node=1), ins=ins, set_to_second=True)
def test_run_fail(app, client): req_single_scrapyd( app, client, view='schedule.run', kws=dict(node=1), data={'filename': '%s_%s_%s.pickle' % (PROJECT, VERSION, SPIDER)}, ins='Fail to schedule', set_to_second=True)
def test_auto_packaging(app, client): data = { 'folder': cst.PROJECT, 'project': cst.PROJECT, 'version': cst.VERSION, } with app.test_request_context(): # http://localhost/1/schedule/ScrapydWeb_demo/2018-01-01T01_01_01/ req_single_scrapyd(app, client, view='deploy.upload', kws=dict(node=1), data=data, location=url_for('schedule', node=1, project=cst.PROJECT, version=cst.VERSION))
def test_auto_packaging_unicode(app, client): if cst.WINDOWS_NOT_CP936: return data = { 'folder': u'demo - 副本', 'project': u'demo - 副本', 'version': cst.VERSION, } with app.test_request_context(): req_single_scrapyd(app, client, view='deploy.upload', kws=dict(node=1), data=data, location=url_for('schedule', node=1, project='demo_____', version=cst.VERSION))
def test_schedule_with_url_project(app, client): ins = [ "selectedProject = '%s'" % PROJECT, "selectedVersion = 'default: the latest version'", "this.loadSpiders();", "selectedSpider = '%s'" % SPIDER ] kws = dict(node=1, project=PROJECT, version=DEFAULT_LATEST_VERSION, spider=SPIDER) req_single_scrapyd(app, client, view='schedule.schedule', kws=kws, ins=ins)
def test_items(app, client): try: req_single_scrapyd(app, client, view='items', kws=dict(node=1), ins='Directory listing for /items/') except AssertionError: req_single_scrapyd(app, client, view='items', kws=dict(node=1), ins='No Such Resource')
def test_schedule_xhr(app, client): req_single_scrapyd(app, client, view='schedule.xhr', kws=dict(node=1, filename=FILENAME), jskws=dict(status=cst.OK, jobid=cst.JOBID)) req_single_scrapyd(app, client, view='api', kws=dict(node=1, opt='forcestop', project=cst.PROJECT, version_spider_job=cst.JOBID))
def test_check_browser(app, client): ins = 'checkBrowser();' req_single_scrapyd(app, client, view='jobs', kws=dict(node=1), headers=cst.HEADERS_DICT['IE'], ins=ins) req_single_scrapyd(app, client, view='jobs', kws=dict(node=1), headers=cst.HEADERS_DICT['EDGE'], ins=ins)
def test_check_update(app, client): @app.context_processor def inject_variable(): return dict(CHECK_LATEST_VERSION_FREQ=1) req_single_scrapyd(app, client, view='jobs', kws=dict(node=1), ins='<script>setTimeout("checkLatestVersion(', nos='<!-- <script>setTimeout("checkLatestVersion(') req_single_scrapyd(app, client, view='jobs', kws=dict(node=1, ui='mobile'), mobileui=True, ins='<script>setTimeout("checkLatestVersion(', nos='<!-- <script>setTimeout("checkLatestVersion(') @app.context_processor def inject_variable(): return dict(CHECK_LATEST_VERSION_FREQ=100) req_single_scrapyd(app, client, view='jobs', kws=dict(node=1), nos='<script>setTimeout("checkLatestVersion(') req_single_scrapyd(app, client, view='jobs', kws=dict(node=1, ui='mobile'), mobileui=True, nos='<script>setTimeout("checkLatestVersion(')
def test_schedule_xhr(app, client): req_single_scrapyd(app, client, view='schedule.schedule_xhr', kws=dict(node=1, filename='%s_%s_%s.pickle' % (PROJECT, VERSION, SPIDER)), jskws=dict(status=OK, jobid=JOBID)) req_single_scrapyd(app, client, view='api', kws=dict(node=1, opt='forcestop', project=PROJECT, version_spider_job=JOBID))
def test_scrapy_cfg_node_not_exist(app, client): with app.test_request_context(): for folder, result in cst.SCRAPY_CFG_DICT.items(): data = { 'folder': folder, 'project': cst.PROJECT, 'version': cst.VERSION, } nos = [] if folder == 'demo_only_scrapy_cfg' or not result: ins = 'Fail to deploy project, got status' else: ins = ['Fail to deploy', result] nos = 'got status' req_single_scrapyd(app, client, view='deploy.upload', kws=dict(node=1), data=data, ins=ins, nos=nos, set_to_second=True)
def test_check(app, client): data = { 'project': PROJECT, '_version': VERSION, 'spider': SPIDER, 'jobid': JOBID, 'USER_AGENT': 'chrome', 'COOKIES_ENABLED': 'False', 'ROBOTSTXT_OBEY': 'False', 'CONCURRENT_REQUESTS': '1', 'DOWNLOAD_DELAY': '2', 'additional': '-d setting=CLOSESPIDER_TIMEOUT=60 \r\n-d setting=CLOSESPIDER_PAGECOUNT=10 \r\n-d arg1=val1' } data_ = { 'project': PROJECT, '_version': DEFAULT_LATEST_VERSION, 'spider': SPIDER, 'additional': '-d setting=CLOSESPIDER_TIMEOUT=60 -d arg1' } req_single_scrapyd(app, client, view='schedule.check', kws=dict(node=1), data=data, jskws=dict(filename='%s_%s_%s.pickle' % (PROJECT, VERSION, SPIDER))) req_single_scrapyd( app, client, view='schedule.check', kws=dict(node=1), data=data_, jskws=dict(filename='%s_%s_%s.pickle' % (PROJECT, 'default-the-latest-version', SPIDER)))
def test_run(app, client): with app.test_request_context(): req_single_scrapyd(app, client, view='schedule.run', kws=dict(node=1), data=dict(filename=FILENAME), location=url_for('jobs', node=1)) sleep() ins = [ 'JOB: %s' % cst.JOBID, 'USER_AGENT: Mozilla/5.0 (Windows', 'ROBOTSTXT_OBEY: False', 'COOKIES_ENABLED: False', 'CONCURRENT_REQUESTS: 1', 'DOWNLOAD_DELAY: 2', 'CLOSESPIDER_TIMEOUT: 60', 'CLOSESPIDER_PAGECOUNT: 10', 'self.arg1: %s' % metadata['value'] ] req_single_scrapyd(app, client, view='log', kws=dict(node=1, opt='utf8', project=cst.PROJECT, spider=cst.SPIDER, job=cst.JOBID), ins=ins) req_single_scrapyd(app, client, view='api', kws=dict(node=1, opt='forcestop', project=cst.PROJECT, version_spider_job=cst.JOBID))
def test_index(app, client): with app.test_request_context(): for __, headers in cst.HEADERS_DICT.items(): req_single_scrapyd(app, client, view='index', kws=dict(ui='mobile'), headers=headers, location=url_for('jobs', node=1, ui='mobile')) for key in ['Chrome', 'iPad']: req_single_scrapyd(app, client, view='index', kws={}, headers=cst.HEADERS_DICT[key], location=url_for( 'jobs', node=1)) # not the Servers page for key in ['iPhone', 'Android']: req_single_scrapyd(app, client, view='index', kws={}, headers=cst.HEADERS_DICT[key], location=url_for('jobs', node=1, ui='mobile'))
def test_dropdown_for_mobile_device(app, client): req_single_scrapyd(app, client, view='dashboard', kws=dict(node=1), headers=cst.HEADERS_DICT['Chrome'], ins='dropdown.css', nos=['dropdown_mobileui.css', 'handleDropdown();']) req_single_scrapyd(app, client, view='dashboard', kws=dict(node=1), headers=cst.HEADERS_DICT['iPhone'], nos='dropdown.css', ins=['dropdown_mobileui.css', 'handleDropdown();']) req_single_scrapyd(app, client, view='dashboard', kws=dict(node=1), headers=cst.HEADERS_DICT['iPad'], nos='dropdown.css', ins=['dropdown_mobileui.css', 'handleDropdown();'])
def test_execute_task_exception(app, client): check_data_ = dict(check_data) check_data_.update(action='add') req(app, client, view='schedule.check', kws=dict(node=NODE), data=check_data_, jskws=dict(cmd="-d _version=%s" % cst.VERSION, filename=FILENAME)) with app.test_request_context(): text, __ = req(app, client, view='schedule.run', kws=dict(node=NODE), data=run_data, location=url_for('tasks', node=NODE)) m = re.search(cst.TASK_NEXT_RUN_TIME_PATTERN, unquote_plus(text)) task_id = int(m.group(1)) print("task_id: %s" % task_id) __, js = req(app, client, view='tasks.xhr', kws=dict(node=NODE, action='dump', task_id=task_id)) assert js['data']['selected_nodes'] == [1, 2] # req_single_scrapyd would set single_scrapyd=True req_single_scrapyd(app, client, view='tasks.xhr', kws=dict(node=1, action='fire', task_id=task_id)) sleep() req(app, client, view='tasks', kws=dict(node=1), ins=["id: %s," % task_id, "prev_run_result: 'FAIL 1, PASS 1',", "fail_times: 1,", "run_times: 'FAIL 1 / 1',"]) text, __ = req(app, client, view='tasks', kws=dict(node=1, task_id=task_id), ins=["fail_count: 1,", "pass_count: 1,", ":total='1'"]) with app.test_request_context(): url_delete = url_for('tasks.xhr', node=1, action='delete', task_id=task_id) # in the task results page: url_action: '/1/tasks/xhr/delete/5/10/', task_result_id = int(re.search(r'%s(\d+)/' % url_delete, text).group(1)) print("task_result_id: %s" % task_result_id) # In baseview.py: assert 0 < self.node <= self.SCRAPYD_SERVERS_AMOUNT # Note that AssertionError would be raise directly in test, whereas internal_server_error() would return 500.html # instead when the app is actually running, getting '500 error node index error: 2, which should be between 1 and 1' req(app, client, view='tasks', kws=dict(node=1, task_id=task_id, task_result_id=task_result_id), ins=["node: 1,", "server: '%s'," % app.config['SCRAPYD_SERVERS'][0], "status_code: 200,", "status: 'ok',", "node: 2,", "status_code: -1,", "status: 'exception',", "node index error", ":total='2'"]) req(app, client, view='tasks.xhr', kws=dict(node=1, action='delete', task_id=task_id))
def test_check(app, client): # ScrapydWeb_demo.egg: custom_settings = {}, also log settings & arguments upload_file_deploy(app, client, filename='ScrapydWeb_demo.egg', project=cst.PROJECT, redirect_project=cst.PROJECT) data = dict( project=cst.PROJECT, _version=cst.VERSION, spider=cst.SPIDER, jobid=cst.JOBID, USER_AGENT='Chrome', ROBOTSTXT_OBEY='False', COOKIES_ENABLED='False', CONCURRENT_REQUESTS='1', DOWNLOAD_DELAY='2', additional= ("-d setting=CLOSESPIDER_TIMEOUT=60 \r\n-d setting=CLOSESPIDER_PAGECOUNT=10 \r\n" "-d arg1=%s") % metadata['value']) data_ = dict(project=cst.PROJECT, _version=cst.DEFAULT_LATEST_VERSION, spider=cst.SPIDER, additional="-d setting=CLOSESPIDER_TIMEOUT=60 -d arg1") req_single_scrapyd(app, client, view='schedule.check', kws=dict(node=1), data=data, jskws=dict(filename=FILENAME)) req_single_scrapyd( app, client, view='schedule.check', kws=dict(node=1), data=data_, jskws=dict(filename='%s_%s_%s.pickle' % (cst.PROJECT, 'default-the-latest-version', cst.SPIDER)))
def test_run(app, client): # ScrapydWeb_demo.egg: custom_settings = {}, also log settings & arguments upload_file_deploy(app, client, filename='ScrapydWeb_demo.egg', project=cst.PROJECT, redirect_project=cst.PROJECT) with app.test_request_context(): req_single_scrapyd(app, client, view='schedule.run', kws=dict(node=1), data=dict(filename='%s_%s_%s.pickle' % (cst.PROJECT, cst.VERSION, cst.SPIDER)), location=url_for('dashboard', node=1)) sleep() ins = [ 'JOB: %s' % cst.JOBID, 'USER_AGENT: Mozilla/5.0', 'COOKIES_ENABLED: False', 'ROBOTSTXT_OBEY: False', 'CONCURRENT_REQUESTS: 1', 'DOWNLOAD_DELAY: 2', 'CLOSESPIDER_TIMEOUT: 60', 'CLOSESPIDER_PAGECOUNT: 10', 'self.arg1: val1' ] req_single_scrapyd(app, client, view='log', kws=dict(node=1, opt='utf8', project=cst.PROJECT, spider=cst.SPIDER, job=cst.JOBID), ins=ins) req_single_scrapyd(app, client, view='api', kws=dict(node=1, opt='forcestop', project=cst.PROJECT, version_spider_job=cst.JOBID))
def test_switch_node_skip(app, client): req_single_scrapyd(app, client, view='jobs', kws=dict(node=1), nos=['onclick="switchNode', 'id="skip_nodes_checkbox"'])
def test_history_log(app, client): req_single_scrapyd(app, client, view='schedule.history', kws=dict(filename='history.log'), ins='history.log')
def test_cluster_reports_not_exists(app, client): nos = ['<span>Cluster Reports</span>', '<el-tab-pane label="Get Reports"'] req_single_scrapyd(app, client, view='servers', kws=dict(node=1), nos=nos)
def test_page(app, client): for view, title in cst.VIEW_TITLE_MAP.items(): req_single_scrapyd(app, client, view=view, kws=dict(node=1), ins=title)