def test_scrapy_cfg_first_node_not_exist(app, client):
    """Deploying when the first selected node is unreachable must surface its status.

    For each scrapy.cfg fixture folder, a two-node deploy is attempted after
    switching to the (dead) alternate Scrapyd server.
    """
    switch_scrapyd(app)
    for folder, result in cst.SCRAPY_CFG_DICT.items():
        data = {
            '1': 'on',
            '2': 'on',
            'checked_amount': '2',
            'folder': folder,
            'project': cst.PROJECT,
            'version': cst.VERSION,
        }
        if folder == 'demo_only_scrapy_cfg' or not result:
            # Valid cfg (or the cfg-only fixture): failure comes from the
            # unreachable first node itself.
            expected = ['fail - ScrapydWeb', 'the first selected node returned status']
            unexpected = []
        else:
            # Broken cfg: its own alert text wins; the node-status message
            # must NOT appear.
            expected = ['fail - ScrapydWeb', result]
            unexpected = 'the first selected node returned status'
        req(app, client, view='deploy.upload', kws=dict(node=2),
            data=data, ins=expected, nos=unexpected)
def test_switch_template(app, client):
    # End-to-end check of the tasks pages around a Scrapyd-server switch:
    # a task fired against the switched (unreachable) server should first be
    # listed as ok/pending, then flip to a failure after its run completes.
    task_id = metadata['task_id']
    task_result_id = metadata['task_result_id']
    # Delete the earlier run's result so the task starts with one clean record.
    req(app, client, view='tasks.xhr',
        kws=dict(node=NODE, action='delete', task_id=task_id, task_result_id=task_result_id))
    # With a single remaining result the detail page shows ok/200, no failures.
    req(app, client, view='tasks', kws=dict(node=NODE, task_id=task_id),
        ins=["status_code: 200,", "status: 'ok',", ":total='1'"],
        nos=["status_code: -1,", "status: 'error',", 'label="Fail count"', 'label="Server"'])
    switch_scrapyd(app)
    # Fire the task again, now targeting the switched Scrapyd server.
    req(app, client, view='tasks.xhr', kws=dict(node=NODE, action='fire', task_id=task_id))
    sleep(2)
    # Shortly after firing: run counted, but no pass/fail outcome yet.
    req(app, client, view='tasks', kws=dict(node=NODE),
        ins=["id: %s," % task_id, "prev_run_result: 'FAIL 0, PASS 0',", "run_times: 2,"])
    req(app, client, view='tasks', kws=dict(node=NODE, task_id=task_id),
        ins=['label="Fail count"', "pass_count: 0,", "fail_count: 0,", "pass_count: 1,", ":total='2'"],
        nos=['label="Server"', "status_code:", "status:"])
    # Wait for the fired run to finish and be recorded as a failure.
    sleep(28)
    req(app, client, view='tasks', kws=dict(node=NODE),
        ins=["id: %s," % task_id, "prev_run_result: 'FAIL 1, PASS 0',", "run_times: 'FAIL 1 / 2',"])
    # Detail page now mixes the old ok/200 record with the new error/-1 one.
    req(app, client, view='tasks', kws=dict(node=NODE, task_id=task_id),
        ins=["status_code: 200,", "status: 'ok',", "status_code: -1,", "status: 'error',", ":total='2'"],
        nos=['label="Fail count"', 'label="Server"'])
    # Clean up: remove the task entirely.
    req(app, client, view='tasks.xhr', kws=dict(node=NODE, action='delete', task_id=task_id))
def test_run_fail(app, client):
    """A multinode schedule.run against the switched (dead) Scrapyd must terminate."""
    switch_scrapyd(app)
    req(app, client, view='schedule.run', kws=dict(node=NODE),
        data=run_data, ins='Multinode schedule terminated')
def test_upload_file_deploy(app, client):
    """Deploy a spread of egg/zip/tar.gz archives (ASCII and non-ASCII names)
    to multiple nodes, then verify scrapy.cfg fixtures pass/fail as expected,
    both before and after switching to an unreachable Scrapyd server.
    """
    deploy = partial(upload_file_deploy, app=app, client=client, multinode=True)

    filenames = [
        'demo.egg',
        'demo_inner.zip',
        'demo_outer.zip',
        'demo - Win7CNsendzipped.zip',
        'demo - Win10cp1252.zip',
    ]
    if cst.WINDOWS_NOT_CP936:
        filenames += [
            'demo - Ubuntu.zip',
            'demo - Ubuntu.tar.gz',
            'demo - macOS.zip',
            'demo - macOS.tar.gz',
        ]
    else:
        # CP936 environments exercise Chinese filenames as well.
        filenames += [
            u'副本.zip',
            u'副本.tar.gz',
            u'副本.egg',
            u'demo - 副本 - Win7CN.zip',
            u'demo - 副本 - Win7CNsendzipped.zip',
            u'demo - 副本 - Win10cp936.zip',
            u'demo - 副本 - Ubuntu.zip',
            u'demo - 副本 - Ubuntu.tar.gz',
            u'demo - 副本 - macOS.zip',
            u'demo - 副本 - macOS.tar.gz',
        ]

    for filename in filenames:
        if filename == 'demo.egg':
            project = redirect_project = cst.PROJECT
        else:
            # Project name is the filename minus its archive extension;
            # non-strict characters are replaced with '_' in the redirect.
            project = re.sub(r'\.egg|\.zip|\.tar\.gz', '', filename)
            if project == u'副本':
                project = 'demo_unicode'
            redirect_project = re.sub(cst.STRICT_NAME_PATTERN, '_', project)
        deploy(filename=filename, project=project, redirect_project=redirect_project)

    # scrapy.cfg fixtures: a truthy alert means the deploy must fail with it.
    for filename, alert in cst.SCRAPY_CFG_DICT.items():
        if alert:
            deploy(filename='%s.zip' % filename, project=filename, alert=alert, fail=True)
        else:
            deploy(filename='%s.zip' % filename, project=filename, redirect_project=filename)

    # After switching to a dead Scrapyd server, every deploy should fail,
    # valid cfgs included (their failure comes from the node status).
    switch_scrapyd(app)
    for filename, alert in cst.SCRAPY_CFG_DICT.items():
        if filename == 'demo_only_scrapy_cfg' or not alert:
            alert = 'the first selected node returned status'
        deploy(filename='%s.zip' % filename, project=filename, alert=alert, fail=True)
def test_page(app, client):
    """Every mapped view shows its title on node 1; node 2 fails until the
    Scrapyd server is switched, after which node 2 renders the titles too.
    """
    for view, title in cst.VIEW_TITLE_MAP.items():
        req(app, client, view=view, kws=dict(node=1), ins=title)
    # Node 2 is unreachable, so these views report a connection failure.
    for view in ['jobs', 'items', 'logs']:
        req(app, client, view=view, kws=dict(node=2), ins='status_code: -1')
    switch_scrapyd(app)
    for view, title in cst.VIEW_TITLE_MAP.items():
        req(app, client, view=view, kws=dict(node=2), ins=title)
def test_page(app, client):
    """Mapped views show their titles on node 1; on node 2 the proxied views
    fail until the Scrapyd server is switched, then render normally.
    """
    for view, title in cst.VIEW_TITLE_MAP.items():
        req(app, client, view=view, kws=dict(node=1), ins=title)
    # When v*p*n is globally enabled for testing Telegram, got 500 Internal Privoxy Error
    failure_markers = ['fail - ScrapydWeb', 'status_code: -1']
    for view in ('jobs', 'logs', 'items'):
        req(app, client, view=view, kws=dict(node=2), ins=failure_markers)
    switch_scrapyd(app)
    for view, title in cst.VIEW_TITLE_MAP.items():
        req(app, client, view=view, kws=dict(node=2), ins=title)
def test_run_fail(app, client):
    """A two-node schedule.run against the switched (dead) Scrapyd must terminate."""
    pickle_name = '%s_%s_%s.pickle' % (cst.PROJECT, cst.VERSION, cst.SPIDER)
    data = {
        '1': 'on',
        '2': 'on',
        'checked_amount': '2',
        'filename': pickle_name,
    }
    switch_scrapyd(app)
    req(app, client, view='schedule.run', kws=dict(node=2),
        data=data, ins='Multinode schedule terminated')