Example #1
def test_run(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    data = {
        '1': 'on',
        '2': 'on',
        'checked_amount': '2',
        'filename': '%s_%s_%s.pickle' % (cst.PROJECT, cst.VERSION, cst.SPIDER)
    }
    req(app,
        client,
        view='schedule.run',
        kws=dict(node=2),
        data=data,
        ins=[
            'run results - ScrapydWeb', 'id="checkbox_2"',
            'onclick="passToOverview();"'
        ])

    req(app,
        client,
        view='api',
        kws=dict(node=1,
                 opt='forcestop',
                 project=cst.PROJECT,
                 version_spider_job=cst.JOBID))
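The req helper that these examples lean on is not shown on this page. Judging only from its call sites, a minimal sketch could look like the following; the keyword arguments mirror the calls above, but the body is an assumption, not ScrapydWeb's actual test utility:

import json

from flask import url_for


def req(app, client, view, kws, data=None, ins=None, nos=None,
        jskws=None, jskeys=None, location=None):
    # Build the URL for the view under test; url_for needs a request context.
    with app.test_request_context():
        url = url_for(view, **kws)
    response = client.post(url, data=data) if data is not None else client.get(url)
    text = response.get_data(as_text=True)

    for s in ([ins] if isinstance(ins, str) else ins or []):
        assert s in text      # expected substrings are present
    for s in ([nos] if isinstance(nos, str) else nos or []):
        assert s not in text  # unwanted substrings are absent
    if location is not None:
        # Redirect target check, e.g. location=url_for('tasks', node=NODE)
        assert response.headers.get('Location', '').endswith(location)

    try:
        js = json.loads(text)
    except ValueError:
        js = {}
    for k, v in (jskws or {}).items():
        assert js.get(k) == v  # expected JSON key/value pairs
    for k in ([jskeys] if isinstance(jskeys, str) else jskeys or []):
        assert k in js         # required JSON keys exist
    return text, js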
Example #2
def test_log_utf8_stats(app, client):
    upload_file_deploy(app, client, filename='demo.egg', project=cst.PROJECT, redirect_project=cst.PROJECT)

    with app.test_request_context():
        kws = dict(node=1, opt='start', project=cst.PROJECT, version_spider_job=cst.SPIDER)
        __, js = req(app, client, view='api', kws=kws)
        jobid = js['jobid']

        sleep()

        # the Stats page
        req(app, client, view='log', kws=dict(node=1, opt='stats', project=cst.PROJECT, spider=cst.SPIDER, job=jobid),
            ins='Stats collection')
        # the Log page
        req(app, client, view='log', kws=dict(node=1, opt='utf8', project=cst.PROJECT, spider=cst.SPIDER, job=jobid),
            ins='log - ScrapydWeb')

        # For testing request_scrapy_log() of LogView in log.py
        app.config['SCRAPYD_LOGS_DIR'] = 'dir-not-exist'
        req(app, client, view='log', kws=dict(node=1, opt='utf8', project=cst.PROJECT, spider=cst.SPIDER, job=jobid),
            ins='log - ScrapydWeb')

        # the Dashboard page
        url_stop = url_for('api', node=1, opt='stop', project=cst.PROJECT, version_spider_job=jobid)
        req(app, client, view='dashboard', kws=dict(node=1), ins=url_stop)

        client.get(url_for('api', node=1, opt='forcestop', project=cst.PROJECT, version_spider_job=jobid))

        # /1/schedule/ScrapydWeb_demo/default:%20the%20latest%20version/test/
        url_start = url_for('schedule.schedule', node=1, project=cst.PROJECT,
                            version=cst.DEFAULT_LATEST_VERSION, spider=cst.SPIDER)
        req(app, client, view='dashboard', kws=dict(node=1), ins=url_start)
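url_for only works inside an application or request context, which is why these tests wrap their bodies in app.test_request_context(); outside such a context it raises RuntimeError. A standalone illustration (the route here is made up, not ScrapydWeb's actual routing):

from flask import Flask, url_for

app = Flask(__name__)


@app.route('/<int:node>/dashboard/')
def dashboard(node):
    return 'dashboard'


with app.test_request_context():
    assert url_for('dashboard', node=1) == '/1/dashboard/'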
Example #3
def test_run(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=PROJECT,
                       redirect_project=PROJECT)

    with app.test_request_context():
        url = url_for('schedule.run', node=2)
        data = {
            '1': 'on',
            '2': 'on',
            'checked_amount': '2',
            'filename': '%s_%s_%s.pickle' % (PROJECT, VERSION, SPIDER)
        }
        response = client.post(url, data=data)
        text = get_text(response)
        assert ('run results - ScrapydWeb' in text
                and 'id="checkbox_2"' in text
                and 'onclick="passToOverview();"' in text)

        client.get(
            url_for('api',
                    node=1,
                    opt='forcestop',
                    project=PROJECT,
                    version_spider_job=JOBID))
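get_text and load_json are thin helpers over the Flask test response; given how they are called, plausible definitions would be (hypothetical — the project's own test utilities may differ):

import json


def get_text(response):
    return response.get_data(as_text=True)  # decoded body of a Flask test response


def load_json(response):
    return json.loads(get_text(response))   # parsed JSON body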
Example #4
def test_run_with_task(app, client):
    # ScrapydWeb_demo.egg: custom_settings = {}; it also outputs specific settings & arguments in the log
    upload_file_deploy(app,
                       client,
                       filename='ScrapydWeb_demo_no_request.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    req(app,
        client,
        view='tasks.xhr',
        kws=dict(node=NODE, action='enable'),
        ins='STATE_RUNNING',
        nos='STATE_PAUSED')

    with app.test_request_context():
        text, __ = req(app,
                       client,
                       view='schedule.run',
                       kws=dict(node=NODE),
                       data=run_data,
                       location=url_for('tasks', node=NODE))
    m = re.search(cst.TASK_NEXT_RUN_TIME_PATTERN, unquote_plus(text))
    task_id = int(m.group(1))
    print("task_id: %s" % task_id)
    metadata['task_id'] = task_id

    __, js = req(app,
                 client,
                 view='tasks.xhr',
                 kws=dict(node=NODE, action='dump', task_id=task_id))
    assert js['data']['selected_nodes'] == [1, 2]
Example #5
def test_run(app, client):
    set_single_scrapyd(app)
    # ScrapydWeb-demo.egg: custom_settings = {}; it also logs settings & arguments
    upload_file_deploy(app, client, filename='ScrapydWeb-demo.egg', project=PROJECT, redirect_project=PROJECT)

    with app.test_request_context():
        url = url_for('schedule.run', node=1)
        data = {'filename': '%s_%s_%s.pickle' % (PROJECT, VERSION, SPIDER)}
        response = client.post(url, data=data)
        assert url_for('dashboard', node=1) in get_text(response)

        sleep()
        url = url_for('log', node=1, opt='utf8', project=PROJECT, spider=SPIDER, job=JOBID)
        response = client.get(url)
        text = get_text(response)
        assert 'JOB: %s' % JOBID in text
        assert 'USER_AGENT: Mozilla/5.0' in text
        assert 'COOKIES_ENABLED: False' in text
        assert 'ROBOTSTXT_OBEY: False' in text
        assert 'CONCURRENT_REQUESTS: 1' in text
        assert 'DOWNLOAD_DELAY: 2' in text
        assert 'CLOSESPIDER_TIMEOUT: 60' in text
        assert 'CLOSESPIDER_PAGECOUNT: 10' in text
        assert 'self.arg1: val1' in text

        client.get(url_for('api', node=1, opt='forcestop', project=PROJECT, version_spider_job=JOBID))
Example #6
def test_stop(app, client):
    upload_file_deploy(app, client, filename='demo.egg', project=PROJECT, redirect_project=PROJECT)

    with app.test_request_context():
        url = url_for('api', node=1, opt='stop', project=PROJECT, version_spider_job=JOBID)
        response = client.get(url)
        js = load_json(response)
        assert js['status'] == OK and 'prevstate' in js and 'times' not in js  # js['prevstate'] == 'running'
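The assertion mirrors the shape of Scrapyd's cancel.json response, which ScrapydWeb's api view proxies. For reference, calling Scrapyd directly looks like this (assumes a Scrapyd server on localhost:6800; project and job values are illustrative):

import requests

r = requests.post('http://localhost:6800/cancel.json',
                  data={'project': 'demo', 'job': 'some-job-id'})
print(r.json())  # e.g. {"node_name": "...", "status": "ok", "prevstate": "running"}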
Example #7
def test_api_start(app, client):
    global jobid
    upload_file_deploy(app, client, filename='demo.egg', project=cst.PROJECT, redirect_project=cst.PROJECT)

    __, js = req(app, client, view='api',
                 kws=dict(node=1, opt='start', project=cst.PROJECT, version_spider_job=cst.SPIDER),
                 jskws=dict(status=cst.OK), jskeys='jobid')
    jobid = js['jobid']
Example #8
def test_listprojects(app, client):
    # upload_file_deploy(app, client, filename='demo.egg', project=PROJECT, redirect_project=PROJECT)
    upload_file_deploy(app, client, filename='demo.zip', project=PROJECT, redirect_project=PROJECT)

    title = 'Get the list of projects uploaded'
    with app.test_request_context():
        url = url_for('manage', node=1)
        response = client.get(url)
        assert title in get_text(response) and not is_simple_ui(response)
Example #9
def test_deploy_xhr(app, client):
    upload_file_deploy(app, client, filename='demo.egg', project=PROJECT, redirect_project=PROJECT, multinode=False)
    kws = dict(
        node=1,
        eggname='%s_%s_from_file_demo.egg' % (PROJECT, VERSION),
        project=PROJECT,
        version=VERSION
    )
    req(app, client, view='deploy.deploy_xhr', kws=kws, jskws=dict(status=OK, project=PROJECT))
Example #10
def test_listspiders_del(app, client):
    with app.test_request_context():
        d = OrderedDict()  # For Python 2 compatibility

        d['listspiders'] = dict(url=url_for('projects',
                                            node=1,
                                            opt='listspiders',
                                            project=cst.PROJECT,
                                            version_spider_job=cst.VERSION),
                                checks=['Run Spider (test)'])
        d['listspiders_fail'] = dict(
            url=url_for('projects',
                        node=1,
                        opt='listspiders',
                        project=cst.FAKE_PROJECT,
                        version_spider_job=cst.FAKE_VERSION),
            checks=['listspiders.json', 'No such file or directory'])

        d['delversion'] = dict(url=url_for('projects',
                                           node=1,
                                           opt='delversion',
                                           project=cst.PROJECT,
                                           version_spider_job=cst.VERSION),
                               checks=['version deleted'])
        d['delversion_fail'] = dict(
            url=url_for('projects',
                        node=1,
                        opt='delversion',
                        project=cst.FAKE_PROJECT,
                        version_spider_job=cst.FAKE_VERSION),
            checks=['delversion.json', 'See details below'])

        d['delproject'] = dict(url=url_for('projects',
                                           node=1,
                                           opt='delproject',
                                           project=cst.PROJECT),
                               checks=['project deleted'])
        d['delproject_fail'] = dict(
            url=url_for('projects',
                        node=1,
                        opt='delproject',
                        project=cst.FAKE_PROJECT),
            checks=['delproject.json', 'See details below'])

        for k, v in d.items():
            if k == 'delproject':  # Should use OrderedDict for Python 2 compatibility
                upload_file_deploy(app,
                                   client,
                                   filename='demo.zip',
                                   project=cst.PROJECT,
                                   redirect_project=cst.PROJECT)
            response = client.get(v['url'])
            text = get_text(response)
            for c in v['checks']:
                assert c in text
Example #11
def test_listprojects(app, client):
    # upload_file_deploy(app, client, filename='demo.egg', project=cst.PROJECT, redirect_project=cst.PROJECT)
    upload_file_deploy(app,
                       client,
                       filename='demo.zip',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    req(app,
        client,
        view='projects',
        kws=dict(node=1),
        ins='Get the list of projects uploaded')
Example #12
def test_stop(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=PROJECT,
                       redirect_project=PROJECT)

    req(app,
        client,
        view='api',
        kws=dict(node=1, opt='stop', project=PROJECT,
                 version_spider_job=JOBID),
        nos='times',
        jskws=dict(status=OK),
        jskeys='prevstate')
Example #13
def test_api_start(app, client):
    global jobid
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=PROJECT,
                       redirect_project=PROJECT)

    with app.test_request_context():
        url = url_for('api',
                      node=1,
                      opt='start',
                      project=PROJECT,
                      version_spider_job=SPIDER,
                      ui='mobile')
        response = client.get(url)
        js = load_json(response)
        jobid = js['jobid']
        assert js['status'] == OK and js['jobid']
Example #14
def test_run(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=PROJECT,
                       redirect_project=PROJECT)

    with app.test_request_context():
        url = url_for('schedule.run', node=1)
        data = {'filename': '%s_%s_%s.pickle' % (PROJECT, VERSION, SPIDER)}
        response = client.post(url, data=data)
        assert url_for('dashboard', node=1) in get_text(response)

        client.get(
            url_for('api',
                    node=1,
                    opt='forcestop',
                    project=PROJECT,
                    version_spider_job=JOBID))
Example #15
def test_run(app, client):
    # ScrapydWeb_demo.egg: custom_settings = {}; it also logs settings & arguments
    upload_file_deploy(app,
                       client,
                       filename='ScrapydWeb_demo.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    with app.test_request_context():
        req_single_scrapyd(app,
                           client,
                           view='schedule.run',
                           kws=dict(node=1),
                           data=dict(filename='%s_%s_%s.pickle' %
                                     (cst.PROJECT, cst.VERSION, cst.SPIDER)),
                           location=url_for('dashboard', node=1))

    sleep()

    ins = [
        'JOB: %s' % cst.JOBID, 'USER_AGENT: Mozilla/5.0',
        'COOKIES_ENABLED: False', 'ROBOTSTXT_OBEY: False',
        'CONCURRENT_REQUESTS: 1', 'DOWNLOAD_DELAY: 2',
        'CLOSESPIDER_TIMEOUT: 60', 'CLOSESPIDER_PAGECOUNT: 10',
        'self.arg1: val1'
    ]
    req_single_scrapyd(app,
                       client,
                       view='log',
                       kws=dict(node=1,
                                opt='utf8',
                                project=cst.PROJECT,
                                spider=cst.SPIDER,
                                job=cst.JOBID),
                       ins=ins)
    req_single_scrapyd(app,
                       client,
                       view='api',
                       kws=dict(node=1,
                                opt='forcestop',
                                project=cst.PROJECT,
                                version_spider_job=cst.JOBID))
Example #16
def test_check(app, client):
    upload_file_deploy(app,
                       client,
                       filename='ScrapydWeb_demo.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)
    data = dict(
        project=cst.PROJECT,
        _version=cst.VERSION,
        spider=cst.SPIDER,
        jobid=cst.JOBID,
        additional="-d setting=LOGSTATS_INTERVAL=10"  # For the test_telnet_in_stats() below
    )
    req(app,
        client,
        view='schedule.check',
        kws=dict(node=NODE),
        data=data,
        jskws=dict(filename=FILENAME))
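The filename that schedule.check returns (and that the test_run examples post back to schedule.run) follows a fixed <project>_<version>_<spider>.pickle convention; with illustrative values:

project, version, spider = 'demo', '2019-01-01T01_01_01', 'test'  # illustrative values
filename = '%s_%s_%s.pickle' % (project, version, spider)
assert filename == 'demo_2019-01-01T01_01_01_test.pickle'
# The second test_check below shows 'default-the-latest-version' substituted
# into the version slot when no explicit version is given.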
Example #17
def test_check(app, client):
    # ScrapydWeb_demo.egg: custom_settings = {}; it also logs settings & arguments
    upload_file_deploy(app,
                       client,
                       filename='ScrapydWeb_demo.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)
    data = dict(
        project=cst.PROJECT,
        _version=cst.VERSION,
        spider=cst.SPIDER,
        jobid=cst.JOBID,
        USER_AGENT='Chrome',
        ROBOTSTXT_OBEY='False',
        COOKIES_ENABLED='False',
        CONCURRENT_REQUESTS='1',
        DOWNLOAD_DELAY='2',
        additional=("-d setting=CLOSESPIDER_TIMEOUT=60 \r\n-d setting=CLOSESPIDER_PAGECOUNT=10 \r\n"
                    "-d arg1=%s") % metadata['value'])

    data_ = dict(project=cst.PROJECT,
                 _version=cst.DEFAULT_LATEST_VERSION,
                 spider=cst.SPIDER,
                 additional="-d setting=CLOSESPIDER_TIMEOUT=60 -d arg1")
    req_single_scrapyd(app,
                       client,
                       view='schedule.check',
                       kws=dict(node=1),
                       data=data,
                       jskws=dict(filename=FILENAME))
    req_single_scrapyd(
        app,
        client,
        view='schedule.check',
        kws=dict(node=1),
        data=data_,
        jskws=dict(filename='%s_%s_%s.pickle' %
                   (cst.PROJECT, 'default-the-latest-version', cst.SPIDER)))
Example #18
def test_log_utf8_stats(app, client):
    upload_file_deploy(app, client, filename='demo.egg', project=PROJECT, redirect_project=PROJECT)

    with app.test_request_context():
        url = url_for('api', node=1, opt='start', project=PROJECT, version_spider_job=SPIDER)
        response = client.get(url)
        js = load_json(response)
        jobid = js['jobid']

        sleep()

        # UTF8 page
        url = url_for('log', node=1, opt='utf8', project=PROJECT, spider=SPIDER, job=jobid)
        response = client.get(url)
        assert 'utf8 - ScrapydWeb' in get_text(response) and not is_simple_ui(response)

        client.get(url_for('api', node=1, opt='forcestop', project=PROJECT, version_spider_job=jobid))

        # Stats page
        url = url_for('log', node=1, opt='stats', project=PROJECT, spider=SPIDER, job=jobid)
        response = client.get(url)
        assert 'Stats collection' in get_text(response) and not is_simple_ui(response)
Example #19
def test_monitor_alert(app, client):
    # In ScrapydWeb_demo_no_delay.egg: unset CONCURRENT_REQUESTS, unset DOWNLOAD_DELAY
    upload_file_deploy(app,
                       client,
                       filename='ScrapydWeb_demo_no_delay.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    # with app.test_request_context():
    if not (app.config.get('ENABLE_MONITOR', False)
            and app.config.get('ENABLE_EMAIL_ALERT', False)):
        return

    def start_a_job():
        kws = dict(node=1,
                   opt='start',
                   project=cst.PROJECT,
                   version_spider_job=cst.SPIDER)
        __, js = req(app, client, view='api', kws=kws)
        sleep()
        return js['jobid']

    def forcestop_a_job(job):
        req(app,
            client,
            view='api',
            kws=dict(node=1,
                     opt='forcestop',
                     project=cst.PROJECT,
                     version_spider_job=job))

    def post_for_poll(job, job_finished=''):
        kws = dict(node=1,
                   opt='stats',
                   project=cst.PROJECT,
                   spider=cst.SPIDER,
                   job=job,
                   job_finished=job_finished)
        req(app, client, view='log', kws=kws, data={}, ins='Log analysis')

    # Simulate poll post 'Finished'
    app.config['ON_JOB_FINISHED'] = True
    jobid = start_a_job()
    post_for_poll(jobid, job_finished='True')
    forcestop_a_job(jobid)
    sleep()

    # Simulate poll post 'ForceStopped'
    app.config['ON_JOB_FINISHED'] = False
    app.config['LOG_CRITICAL_THRESHOLD'] = 1
    app.config['LOG_CRITICAL_TRIGGER_FORCESTOP'] = True
    jobid = start_a_job()
    post_for_poll(jobid)
    forcestop_a_job(jobid)
    sleep()

    # Simulate poll post 'Stopped'
    app.config['LOG_CRITICAL_THRESHOLD'] = 0
    app.config['LOG_REDIRECT_THRESHOLD'] = 1
    app.config['LOG_REDIRECT_TRIGGER_STOP'] = True
    jobid = start_a_job()
    post_for_poll(jobid)
    forcestop_a_job(jobid)
    sleep()

    # Simulate poll post 'Triggered'
    app.config['LOG_REDIRECT_THRESHOLD'] = 0
    app.config['LOG_IGNORE_THRESHOLD'] = 1
    jobid = start_a_job()
    post_for_poll(jobid)
    forcestop_a_job(jobid)
    sleep()

    # Simulate poll post 'Running'
    app.config['LOG_IGNORE_THRESHOLD'] = 0
    app.config['ON_JOB_RUNNING_INTERVAL'] = 5
    jobid = start_a_job()
    post_for_poll(jobid)  # Would not trigger email

    sleep()
    post_for_poll(jobid)  # Would trigger email

    app.config['ON_JOB_RUNNING_INTERVAL'] = 0
    sleep()
    post_for_poll(jobid)  # Would not trigger email
    forcestop_a_job(jobid)
Example #20
def test_log_utf8_stats(app, client):
    # In ScrapydWeb_demo.egg: CONCURRENT_REQUESTS=1, DOWNLOAD_DELAY=10
    upload_file_deploy(app,
                       client,
                       filename='ScrapydWeb_demo.egg',
                       project=cst.PROJECT,
                       redirect_project=cst.PROJECT)

    with app.test_request_context():
        kws = dict(node=1,
                   opt='start',
                   project=cst.PROJECT,
                   version_spider_job=cst.SPIDER)
        __, js = req(app, client, view='api', kws=kws)
        jobid = js['jobid']
        sleep()

        # the Stats page
        req(app,
            client,
            view='log',
            kws=dict(node=1,
                     opt='stats',
                     project=cst.PROJECT,
                     spider=cst.SPIDER,
                     job=jobid),
            ins='Log analysis')
        # the Log page
        req(app,
            client,
            view='log',
            kws=dict(node=1,
                     opt='utf8',
                     project=cst.PROJECT,
                     spider=cst.SPIDER,
                     job=jobid),
            ins='log - ScrapydWeb')

        # For testing request_scrapy_log() of LogView in log.py
        app.config['LOCAL_SCRAPYD_LOGS_DIR'] = 'non-exist-dir'
        req(app,
            client,
            view='log',
            kws=dict(node=1,
                     opt='utf8',
                     project=cst.PROJECT,
                     spider=cst.SPIDER,
                     job=jobid),
            ins='log - ScrapydWeb')

        # the Jobs page GET
        url_stop = url_for('api',
                           node=1,
                           opt='stop',
                           project=cst.PROJECT,
                           version_spider_job=jobid)
        url_jobs_classic = url_for('jobs', node=1, style='classic')
        url_jobs_database = url_for('jobs', node=1, style='database')
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='classic'),
            ins=[url_stop, url_jobs_database, 'class="table wrap"'],
            nos="Vue.extend(Main)")
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='database'),
            ins=[url_stop, url_jobs_classic, "Vue.extend(Main)"],
            nos='class="table wrap"')

        # ?raise_exception=True
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='database'),
            ins=[url_stop, url_jobs_classic, "Vue.extend(Main)"],
            nos='class="table wrap"')
        req(app,
            client,
            view='jobs',
            kws=dict(node=1),
            ins=[url_stop, url_jobs_classic, "Vue.extend(Main)"],
            nos='class="table wrap"')
        req(app,
            client,
            view='metadata',
            kws=dict(node=1),
            jskws=dict(jobs_style='database'))

        req(app,
            client,
            view='jobs',
            kws=dict(node=1, raise_exception='True'),
            ins=[url_stop, url_jobs_database, 'class="table wrap"'],
            nos="Vue.extend(Main)")
        req(app,
            client,
            view='metadata',
            kws=dict(node=1),
            jskws=dict(jobs_style='classic'))
        req(app,
            client,
            view='jobs',
            kws=dict(node=1),
            ins=[url_stop, url_jobs_database, 'class="table wrap"'],
            nos="Vue.extend(Main)")

        # jobs POST data={}
        jobs_key = '%s/%s/%s' % (cst.PROJECT, cst.SPIDER, jobid)  # type unicode in Python 2
        print('######')
        print(repr(jobs_key))
        print(type(jobs_key))
        __, js = req(app,
                     client,
                     view='jobs',
                     kws=dict(node=1),
                     data={},
                     jskeys=jobs_key)
        jobs_id = js[jobs_key]['id']
        jobs_start = js[jobs_key]['start']
        assert js[jobs_key]['deleted'] == '0'

        # JobsXhrView delete running job
        req(app,
            client,
            view='jobs.xhr',
            kws=dict(node=1, action='delete', id=jobs_id),
            jskws=dict(status=cst.OK))
        # Recover deleted running job
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='database'),
            ins=[
                'Recover deleted job:', url_stop,
                'id: %s,' % jobs_id, jobs_start
            ])

        # forcestop
        client.get(
            url_for('api',
                    node=1,
                    opt='forcestop',
                    project=cst.PROJECT,
                    version_spider_job=jobid))
        sleep()

        # /1/schedule/ScrapydWeb_demo/default:%20the%20latest%20version/test/   NOT unique
        url_start = url_for('schedule',
                            node=1,
                            project=cst.PROJECT,
                            version=cst.DEFAULT_LATEST_VERSION,
                            spider=cst.SPIDER)
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='classic'),
            ins=url_start)
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='database'),
            ins=url_start)

        # JobsXhrView delete finished
        req(app,
            client,
            view='jobs.xhr',
            kws=dict(node=1, action='delete', id=jobs_id),
            jskws=dict(status=cst.OK))
        # JobsView: query_jobs(): self.jobs = self.Job.query.filter_by(deleted=NOT_DELETED)
        # POST data={}
        req(app,
            client,
            view='jobs',
            kws=dict(node=1),
            data={},
            nos=['id: %s,' % jobs_id, jobs_start])
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='database'),
            nos=['id: %s,' % jobs_id, jobs_start])
        req(app,
            client,
            view='jobs',
            kws=dict(node=1, style='classic'),
            ins=jobs_start[5:])
        # delete an id that does not exist
        req(app,
            client,
            view='jobs.xhr',
            kws=dict(node=1, action='delete', id=cst.BIGINT),
            jskws=dict(status=cst.ERROR))
Example #21
def test_upload_file_deploy(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=PROJECT,
                       redirect_project=PROJECT)

    upload_file_deploy(app,
                       client,
                       filename='demo.zip',
                       project='test_demo_zip',
                       redirect_project='test_demo_zip')
    upload_file_deploy(app,
                       client,
                       filename='demo_inner.zip',
                       project='test_inner_zip',
                       redirect_project='test_inner_zip')
    upload_file_deploy(app,
                       client,
                       filename='demo_outer.zip',
                       project='test_outer_zip',
                       redirect_project='test_outer_zip')
    upload_file_deploy(app,
                       client,
                       filename=u'demo - 副本.zip',
                       project=u'demo - 副本',
                       redirect_project='demo-')

    upload_file_deploy(app,
                       client,
                       filename='demo.tar',
                       project='test_demo_tar',
                       redirect_project='test_demo_tar')
    upload_file_deploy(app,
                       client,
                       filename=u'demo - 副本.tar',
                       project=u'demo - 副本',
                       redirect_project='demo-')
    upload_file_deploy(app,
                       client,
                       filename='demo.tar.gz',
                       project='test_demo_tar_gz',
                       redirect_project='test_demo_tar_gz')

    upload_file_deploy(app,
                       client,
                       filename='demo_without_scrapy_cfg.zip',
                       project='demo_without_scrapy_cfg',
                       alert='scrapy.cfg NOT found')
    upload_file_deploy(app,
                       client,
                       filename='demo_only_scrapy_cfg.zip',
                       project='demo_only_scrapy_cfg',
                       alert='ModuleNotFoundError')
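upload_file_deploy is the helper every example on this page starts with. Inferred purely from its keyword arguments, a rough sketch might be as follows; the endpoint name, fixture directory, and version value are assumptions, and the real helper also handles multinode deploys:

import io
import os

from flask import url_for


def upload_file_deploy(app, client, filename, project,
                       redirect_project=None, alert=None, multinode=False):
    path = os.path.join(os.path.dirname(__file__), 'data', filename)  # assumed fixture dir
    with app.test_request_context():
        url = url_for('deploy.upload', node=1)  # assumed endpoint name
    with open(path, 'rb') as f:
        data = {'project': project, 'version': '2019-01-01T01_01_01',  # assumed version
                'file': (io.BytesIO(f.read()), filename)}
    response = client.post(url, data=data, content_type='multipart/form-data')
    text = response.get_data(as_text=True)
    if alert:
        assert alert in text             # deploy rejected with an alert message
    elif redirect_project:
        assert redirect_project in text  # deploy succeeded; project referenced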
Example #22
def test_log_utf8_stats(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=PROJECT,
                       redirect_project=PROJECT)

    with app.test_request_context():
        url = url_for('api',
                      node=1,
                      opt='start',
                      project=PROJECT,
                      version_spider_job=SPIDER)
        response = client.get(url)
        js = load_json(response)
        jobid = js['jobid']

        sleep()

        # Log page
        url = url_for('log',
                      node=1,
                      opt='utf8',
                      project=PROJECT,
                      spider=SPIDER,
                      job=jobid)
        response = client.get(url)
        assert 'log - ScrapydWeb' in get_text(response) and not is_mobileui(response)

        # Stats page
        url = url_for('log',
                      node=1,
                      opt='stats',
                      project=PROJECT,
                      spider=SPIDER,
                      job=jobid)
        response = client.get(url)
        assert 'Stats collection' in get_text(response) and not is_mobileui(response)

        # Dashboard page
        url = url_for('dashboard', node=1)
        response = client.get(url)
        url_stop = url_for('api',
                           node=1,
                           opt='stop',
                           project=PROJECT,
                           version_spider_job=jobid)
        assert url_stop in get_text(response)

        client.get(
            url_for('api',
                    node=1,
                    opt='forcestop',
                    project=PROJECT,
                    version_spider_job=jobid))

        # /1/schedule/ScrapydWeb-demo/default:%20the%20latest%20version/test/
        response = client.get(url)
        url_start = url_for('schedule.schedule',
                            node=1,
                            project=PROJECT,
                            version=DEFAULT_LATEST_VERSION,
                            spider=SPIDER)
        assert url_start in get_text(response)
Example #23
def test_log_utf8_stats(app, client):
    upload_file_deploy(app,
                       client,
                       filename='demo.egg',
                       project=PROJECT,
                       redirect_project=PROJECT)

    with app.test_request_context():
        __, js = req(app,
                     client,
                     view='api',
                     kws=dict(node=1,
                              opt='start',
                              project=PROJECT,
                              version_spider_job=SPIDER))
        print(js)
        jobid = js['jobid']

        sleep()

        # Log page
        req(app,
            client,
            view='log',
            kws=dict(node=1,
                     opt='utf8',
                     project=PROJECT,
                     spider=SPIDER,
                     job=jobid),
            ins='log - ScrapydWeb')

        # Stats page
        req(app,
            client,
            view='log',
            kws=dict(node=1,
                     opt='stats',
                     project=PROJECT,
                     spider=SPIDER,
                     job=jobid),
            ins='Stats collection')

        # Dashboard page
        url_stop = url_for('api',
                           node=1,
                           opt='stop',
                           project=PROJECT,
                           version_spider_job=jobid)
        req(app, client, view='dashboard', kws=dict(node=1), ins=url_stop)

        client.get(
            url_for('api',
                    node=1,
                    opt='forcestop',
                    project=PROJECT,
                    version_spider_job=jobid))

        # /1/schedule/ScrapydWeb-demo/default:%20the%20latest%20version/test/
        url_start = url_for('schedule.schedule',
                            node=1,
                            project=PROJECT,
                            version=DEFAULT_LATEST_VERSION,
                            spider=SPIDER)
        req(app, client, view='dashboard', kws=dict(node=1), ins=url_start)