Example #1
def test_update_feed_entries(fx_xml_for_update, fx_test_stage,
                             fx_crawling_queue):
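    # Assumes app, worker, get_hash and get_url come from the application and
    # test helper modules of this project (imports are not shown in the snippet).
    # The PUT request is accepted (202) while the crawler worker is running and
    # fails with 404 once the worker has been killed.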
    with app.test_client() as client:
        worker.start_worker()
        feed_two_id = get_hash('http://feedtwo.com/feed/atom/')
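        # The test stage prepared by fx_test_stage already holds one entry
        # for this feed.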
        with fx_test_stage as stage:
            assert len(stage.feeds[feed_two_id].entries) == 1

        assert worker.qsize() == 0
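        # With the worker running and its queue empty, the update request
        # is accepted for processing.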
        r = client.put(
            get_url(
                'update_entries',
                category_id='-categoryone/-categorytwo',
                feed_id=feed_two_id
            )
        )
        assert r.status_code == 202
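        # Stop the worker and repeat the same request; it now fails with 404.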
        worker.kill_worker()
        r = client.put(
            get_url(
                'update_entries',
                category_id='-categoryone/-categorytwo',
                feed_id=feed_two_id
            )
        )
        assert r.status_code == 404
Example #2
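# Presumably registered as a pytest fixture (the decorator and imports are not
# shown in this snippet): it starts the crawler worker with an empty queue and
# registers kill_worker as the teardown finalizer.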
def fx_crawler(request):
    worker.start_worker()
    worker.empty_queue()

    request.addfinalizer(worker.kill_worker)