Example #1
def test_parse_upload(app, client):
    req(app,
        client,
        view='parse.upload',
        kws=dict(node=1),
        data={'file': (BytesIO(b'my file contents'), "fake.log")},
        location='/parse/uploaded/')
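All of these examples go through a shared req() test helper that is not shown in this listing. Judging only from the keyword arguments used across the examples, its signature is roughly the sketch below; the parameter names are taken from the calls, and the behaviour described in the comments is an assumption, not the actual helper.

def req(app, client, view, kws, data=None, location=None, ins=None, nos=None,
        jskws=None, jskeys=None, mobileui=False):
    # Assumed behaviour, inferred from how req() is called in these examples:
    #   - build the target URL with url_for(view, **kws)
    #   - GET when data is None, otherwise POST data (including file uploads)
    #   - location: assert the response redirects to this URL
    #   - ins / nos: string(s) that must / must not appear in the response text
    #   - jskws / jskeys: expected key-value pairs / keys in a JSON response
    #   - mobileui: request the mobile UI variant of the page
    #   - returns (response_text, parsed_json), as used in Example #2
    ...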
Example #2
def test_run_with_task(app, client):
    # ScrapydWeb_demo.egg: custom_settings = {}; it also outputs the specific settings & arguments in the log
    upload_file_deploy(app,
                       client,
                       filename='ScrapydWeb_demo_no_request.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    req(app,
        client,
        view='tasks.xhr',
        kws=dict(node=NODE, action='enable'),
        ins='STATE_RUNNING',
        nos='STATE_PAUSED')

    with app.test_request_context():
        text, __ = req(app,
                       client,
                       view='schedule.run',
                       kws=dict(node=NODE),
                       data=run_data,
                       location=url_for('tasks', node=NODE))
    m = re.search(cst.TASK_NEXT_RUN_TIME_PATTERN, unquote_plus(text))
    task_id = int(m.group(1))
    print("task_id: %s" % task_id)
    metadata['task_id'] = task_id

    __, js = req(app,
                 client,
                 view='tasks.xhr',
                 kws=dict(node=NODE, action='dump', task_id=task_id))
    assert js['data']['selected_nodes'] == [1, 2]
Example #3
def test_dashboard(app, client):
    req(app,
        client,
        view='dashboard',
        kws=dict(node=1, ui='mobile'),
        ins='dashboard - mobileui - ScrapydWeb',
        mobileui=True)
Example #4
def test_node_reports_pass(app, client):
    with app.test_request_context():
        url_report = url_for('log', node=1, opt='report', project='PROJECT_PLACEHOLDER',
                             spider='SPIDER_PLACEHOLDER', job='JOB_PLACEHOLDER')
    ins = ["url_report: '%s'," % url_report, "start: '", "finish: '"]
    req(app, client, view='nodereports', kws=dict(node=1), ins=ins)
    req_single_scrapyd(app, client, view='nodereports', kws=dict(node=1), ins=ins)
Example #5
def test_enable_logparser(app, client):
    def json_loads_from_file(path):
        with io.open(path, 'r', encoding='utf-8') as f:
            return json.loads(f.read())

    # In conftest.py: ENABLE_LOGPARSER=False
    assert not os.path.exists(app.config['STATS_JSON_PATH'])
    assert not os.path.exists(app.config['DEMO_JSON_PATH'])
    app.config['ENABLE_LOGPARSER'] = True
    app.config['ENABLE_EMAIL'] = False

    # ['username:password@localhost:6800', ]
    app.config['SCRAPYD_SERVERS'] = app.config['_SCRAPYD_SERVERS']
    check_app_config(app.config)

    logparser_pid = app.config['LOGPARSER_PID']
    assert isinstance(logparser_pid, int) and logparser_pid > 0
    assert app.config['POLL_PID'] is None
    req(app, client, view='settings', kws=dict(node=1), ins='logparser_pid: %s' % logparser_pid)

    sleep()

    stats_json = json_loads_from_file(app.config['STATS_JSON_PATH'])
    assert stats_json['logparser_version'] == cst.LOGPARSER_VERSION
    assert cst.DEMO_JOBID in stats_json['datas'][cst.PROJECT][cst.SPIDER]
    demo_json = json_loads_from_file(app.config['DEMO_JSON_PATH'])
    assert demo_json['runtime'] == '0:01:08'
    assert demo_json['finish_reason'] == 'finished'
    assert demo_json['logparser_version'] == cst.LOGPARSER_VERSION
Example #6
def test_auto_packaging(app, client):
    data = {
        '1': 'on',
        'checked_amount': '1',
        'folder': cst.PROJECT,
        'project': cst.PROJECT,
        'version': cst.VERSION
    }
    req(app,
        client,
        view='deploy.upload',
        kws=dict(node=2),
        data=data,
        ins=[
            'deploy results - ScrapydWeb', 'onclick="multinodeRunSpider();"',
            'id="checkbox_1"'
        ],
        nos='id="checkbox_2"')

    data.update({'2': 'on', 'checked_amount': '2'})
    req(app,
        client,
        view='deploy.upload',
        kws=dict(node=2),
        data=data,
        ins=[
            'deploy results - ScrapydWeb', 'onclick="multinodeRunSpider();"',
            'id="checkbox_1"', 'id="checkbox_2"'
        ])
Example #7
def test_check_with_task(app, client):
    req(app,
        client,
        view='schedule.check',
        kws=dict(node=NODE),
        data=check_data,
        jskws=dict(cmd="-d _version=%s" % cst.VERSION, filename=FILENAME))
Example #8
def test_check_app_config(app, client):
    cleantest(app, client)

    # In conftest.py: ENABLE_LOGPARSER=False
    assert not os.path.exists(app.config['STATS_JSON_PATH'])
    check_app_config(app.config)
    strings = []

    assert app.config['LOGPARSER_PID'] is None
    strings.append('logparser_pid: None')

    poll_pid = app.config['POLL_PID']
    if app.config.get('ENABLE_EMAIL', False):
        assert isinstance(poll_pid, int) and poll_pid > 0
    else:
        assert poll_pid is None
    strings.append('poll_pid: %s' % poll_pid)

    req(app, client, view='settings', kws=dict(node=1), ins=strings)
    assert not os.path.exists(app.config['STATS_JSON_PATH'])

    # Test ENABLE_EMAIL = False
    if app.config.get('ENABLE_EMAIL', False):
        app.config['ENABLE_EMAIL'] = False
        check_app_config(app.config)
        assert app.config['LOGPARSER_PID'] is None
        assert app.config['POLL_PID'] is None
        req(app,
            client,
            view='settings',
            kws=dict(node=1),
            ins='poll_pid: None')
Example #9
def test_jobs(app, client):
    req(app,
        client,
        view='jobs',
        kws=dict(node=1, ui='mobile'),
        ins='jobs - mobileui - ScrapydWeb',
        mobileui=True)
Example #10
def test_select_multinode_checkbox(app, client):
    for view in ['deploy.deploy', 'schedule.schedule']:
        req(app,
            client,
            view=view,
            kws=dict(node=2),
            ins='CheckAll / UncheckAll')
Example #11
def test_daemonstatus(app, client):
    req(app,
        client,
        view='api',
        kws=dict(node=1, opt='daemonstatus'),
        jskws=dict(status=OK),
        jskeys=['pending', 'running', 'finished'])
Example #12
def test_run(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    data = {
        '1': 'on',
        '2': 'on',
        'checked_amount': '2',
        'filename': '%s_%s_%s.pickle' % (cst.PROJECT, cst.VERSION, cst.SPIDER)
    }
    req(app,
        client,
        view='schedule.run',
        kws=dict(node=2),
        data=data,
        ins=[
            'run results - ScrapydWeb', 'id="checkbox_2"',
            'onclick="passToOverview();"'
        ])

    req(app,
        client,
        view='api',
        kws=dict(node=1,
                 opt='forcestop',
                 project=cst.PROJECT,
                 version_spider_job=cst.JOBID))
Example #13
def test_listversions(app, client):
    req(app,
        client,
        view='api',
        kws=dict(node=1, opt='listversions', project=PROJECT),
        jskws=dict(status=OK),
        jskeys='versions')
Example #14
def test_scrapy_cfg_first_node_not_exist(app, client):
    switch_scrapyd(app)
    for folder, result in cst.SCRAPY_CFG_DICT.items():
        data = {
            '1': 'on',
            '2': 'on',
            'checked_amount': '2',
            'folder': folder,
            'project': cst.PROJECT,
            'version': cst.VERSION,
        }
        nos = []
        if folder == 'demo_only_scrapy_cfg' or not result:
            ins = [
                'fail - ScrapydWeb', 'the first selected node returned status'
            ]
        else:
            ins = ['fail - ScrapydWeb', result]
            nos = 'the first selected node returned status'
        req(app,
            client,
            view='deploy.upload',
            kws=dict(node=2),
            data=data,
            ins=ins,
            nos=nos)
Example #15
def test_listprojects(app, client):
    req(app,
        client,
        view='api',
        kws=dict(node=1, opt='listprojects'),
        jskws=dict(status=OK),
        jskeys='projects')
Example #16
def test_index(app, client):
    with app.test_request_context():
        req(app,
            client,
            view='index',
            kws=dict(ui='mobile'),
            location=url_for('dashboard', node=1, ui='mobile'))
Example #17
def test_listjobs(app, client):
    req(app,
        client,
        view='api',
        kws=dict(node=1, opt='listjobs', project=PROJECT),
        jskws=dict(status=OK, url='listjobs.json'),
        jskeys=['pending', 'running', 'finished'])
Example #18
def test_schedule_xhr(app, client):
    req(app,
        client,
        view='schedule.schedule_xhr',
        kws=dict(node=2,
                 filename='%s_%s_%s.pickle' %
                 (cst.PROJECT, cst.VERSION, cst.SPIDER)),
        jskws=dict(status=cst.ERROR))
Example #19
def forcestop_a_job(job):
    req(app,
        client,
        view='api',
        kws=dict(node=1,
                 opt='forcestop',
                 project=cst.PROJECT,
                 version_spider_job=job))
Example #20
def post_for_poll(job, job_finished=''):
    kws = dict(node=1,
               opt='stats',
               project=cst.PROJECT,
               spider=cst.SPIDER,
               job=job,
               job_finished=job_finished)
    req(app, client, view='log', kws=kws, data={}, ins='Log analysis')
Example #21
def post_for_caching(job, job_finished=''):
    kws = dict(node=1,
               opt='stats',
               project=PROJECT,
               spider=SPIDER,
               job=job,
               job_finished=job_finished)
    req(app, client, view='log', kws=kws, data={}, ins='Stats collection')
Example #22
def test_run_fail(app, client):
    switch_scrapyd(app)
    req(app,
        client,
        view='schedule.run',
        kws=dict(node=NODE),
        data=run_data,
        ins='Multinode schedule terminated')
Example #23
def multinode_command(app, client, opt, title, project, version_job=None):
    data = {'1': 'on', '2': 'on'}
    req(app,
        client,
        view='multinode',
        kws=dict(node=1, opt=opt, project=project, version_job=version_job),
        data=data,
        ins=[title, 'id="checkbox_1"', 'id="checkbox_2"'])
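multinode_command above is a parametrized helper rather than a test case. A hypothetical caller might look like the following; 'delversion' is a standard Scrapyd API operation, but the title string and the exact call are illustrative and not taken from the test suite.

def test_multinode_delversion(app, client):
    # Hypothetical usage of the multinode_command helper defined above;
    # the title string is assumed.
    multinode_command(app, client, opt='delversion', title='Delete Version',
                      project=cst.PROJECT, version_job=cst.VERSION)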
Example #24
def test_stats_with_logparser_disabled(app, client):
    kws = dict(node=1, opt='stats', project=cst.PROJECT, spider=cst.SPIDER, job=cst.DEMO_JOBID)
    req(app, client, view='log', kws=kws,
        ins=["Using local logfile:", 'id="finish_reason">finished<'], nos='refresh_button')

    kws['job'] = cst.DEMO_UNFINISHED_LOG.split('.')[0]
    ins = ['id="finish_reason">N/A<', '<a id="refresh_button"', "var by = 'ScrapydWeb';"]
    req(app, client, view='log', kws=kws, ins=ins)
Example #25
def test_forcestop(app, client):
    req(app,
        client,
        view='api',
        kws=dict(node=1,
                 opt='forcestop',
                 project=PROJECT,
                 version_spider_job=JOBID),
        jskws=dict(status=OK, times=2, prevstate=None))
Example #26
def test_api_stop(app, client):
    sleep()
    req(app,
        client,
        view='api',
        kws=dict(node=1, opt='stop', project=PROJECT,
                 version_spider_job=jobid),
        jskws=dict(status=OK, prevstate='running'),
        nos='times')
Example #27
def listspiders(app, client, version):
    req(app,
        client,
        view='api',
        kws=dict(node=1,
                 opt='listspiders',
                 project=PROJECT,
                 version_spider_job=version),
        jskws=dict(status=OK, spiders=SPIDER))
Example #28
def test_log_not_exist(app, client):
    # the Stats page
    kws = dict(node=1, opt='stats', project=cst.PROJECT, spider=cst.SPIDER, job=cst.FAKE_JOBID)
    ins = ['fail - ScrapydWeb', 'status_code: 404']
    req(app, client, view='log', kws=kws, ins=ins)
    # the Log page
    kws['opt'] = 'utf8'
    ins = ['fail - ScrapydWeb', 'status_code: 404']
    req(app, client, view='log', kws=kws, ins=ins)
Example #29
def test_parse_source_demo_log(app, client):
    req(app,
        client,
        view='parse.source',
        kws=dict(filename=cst.DEMO_LOG),
        ins=[
            '2018-10-23 18:28:34 [scrapy.utils.log] INFO: Scrapy 1.5.0 started (bot: demo)',
            '2018-10-23 18:29:42 [scrapy.core.engine] INFO: Spider closed (finished)'
        ])
Example #30
def test_auto_packaging_select_option(app, client):
    ins = [
        '(14 projects)', u"var folders = ['demo - 副本', 'demo',",
        "var projects = ['demo-copy', 'demo',",
        '<div>%s<' % cst.PROJECT, u'<div>demo - 副本<', '<div>demo<',
        '<div>demo_only_scrapy_cfg<'
    ]
    nos = ['<div>demo_without_scrapy_cfg<', '<h3>NO projects found']
    req(app, client, view='deploy', kws=dict(node=2), ins=ins, nos=nos)
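upload_file_deploy, used in Examples #2 and #12, is another helper not defined in this listing. Combining the file-upload pattern from Example #1 with the deploy.upload view from Examples #6 and #14, it presumably does something like the sketch below; the data directory, form field names, and assertion are assumptions, not the actual helper.

def upload_file_deploy(app, client, filename, project, redirect_project=None):
    # Assumed sketch: POST an egg from a local test-data folder to the
    # deploy.upload view and check that the result page mentions the project.
    # The 'data' directory and the form fields are guesses.
    with open(os.path.join('data', filename), 'rb') as f:
        data = {
            'file': (BytesIO(f.read()), filename),
            'project': project,
            'version': cst.VERSION,
        }
    req(app, client, view='deploy.upload', kws=dict(node=1),
        data=data, ins=redirect_project or project)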