def test_update_feed_entries(fx_xml_for_update, fx_test_stage, fx_crawling_queue):
    """A PUT to a single feed's update-entries endpoint is accepted (202)
    and enqueues exactly one crawling job.

    NOTE(review): asserts against module-level ``crawling_queue`` while the
    ``fx_crawling_queue`` fixture is only requested for its setup side
    effect — presumably the fixture wires up that queue; verify in conftest.
    """
    with app.test_client() as client:
        target_feed_id = get_hash('http://feedtwo.com/feed/atom/')
        # Precondition: the stage already holds one entry for this feed.
        with fx_test_stage as stage:
            assert len(stage.feeds[target_feed_id].entries) == 1
        # Nothing queued before the request.
        assert crawling_queue.qsize() == 0
        response = client.put(
            get_url('update_entries',
                    category_id='-categoryone/-categorytwo',
                    feed_id=target_feed_id))
        # 202 Accepted: the update is scheduled, not performed inline.
        assert response.status_code == 202
        assert crawling_queue.qsize() == 1
def test_update_feed_entries(fx_xml_for_update, fx_test_stage, fx_crawling_queue):
    """Requesting an entry update for one feed returns 202 Accepted and
    puts a single job on the crawling queue.

    NOTE(review): this re-defines the function of the same name above and
    therefore shadows it at collection time — likely an accidental
    duplicate; confirm and remove one copy.
    """
    with app.test_client() as client:
        feed_id = get_hash('http://feedtwo.com/feed/atom/')
        # The fixture stage starts out with exactly one entry for the feed.
        with fx_test_stage as stage:
            assert len(stage.feeds[feed_id].entries) == 1
        assert crawling_queue.qsize() == 0  # queue empty before the PUT
        url = get_url(
            'update_entries',
            category_id='-categoryone/-categorytwo',
            feed_id=feed_id,
        )
        resp = client.put(url)
        assert resp.status_code == 202  # accepted for asynchronous crawl
        assert crawling_queue.qsize() == 1  # exactly one job enqueued
def test_update_category_entries(fx_xml_for_update, fx_test_stage, fx_crawling_queue):
    """A PUT to ``/entries/`` (all feeds) is accepted with 202 and results
    in one job on the crawling queue.

    NOTE(review): asserts against module-level ``crawling_queue``; the
    ``fx_crawling_queue`` fixture appears to exist for setup only —
    verify in conftest.
    """
    with app.test_client() as client:
        second_feed_id = get_hash('http://feedtwo.com/feed/atom/')
        third_feed_id = get_hash('http://feedthree.com/feed/atom/')
        # Precondition: both fixture feeds start with a single entry each.
        with fx_test_stage as stage:
            assert len(stage.feeds[second_feed_id].entries) == 1
            assert len(stage.feeds[third_feed_id].entries) == 1
        response = client.put('/entries/')
        # 202 Accepted: crawling happens asynchronously.
        assert response.status_code == 202
        assert crawling_queue.qsize() == 1
def test_update_category_entries(fx_xml_for_update, fx_test_stage, fx_crawling_queue):
    """Updating every feed via ``PUT /entries/`` answers 202 and enqueues
    a single crawling job.

    NOTE(review): duplicate of the function defined immediately above —
    this definition shadows it; confirm whether one copy should be removed.
    """
    with app.test_client() as client:
        feed_ids = [
            get_hash('http://feedtwo.com/feed/atom/'),
            get_hash('http://feedthree.com/feed/atom/'),
        ]
        # Each fixture feed begins with exactly one stored entry.
        with fx_test_stage as stage:
            for fid in feed_ids:
                assert len(stage.feeds[fid].entries) == 1
        resp = client.put('/entries/')
        assert resp.status_code == 202  # scheduled, not executed inline
        assert crawling_queue.qsize() == 1  # one job queued for the crawl