def test_gen_html_content():
    """Verify _gen_html writes an HTML page whose <title> matches the fixture."""
    tasks._gen_html(SPIDER_NAME, SPIDER_START_URLS)

    expected = read_test_file_content(
        'files/testspider_articles.html.example')
    actual = read_test_file_content('files/testspider_articles.html')

    # Compare only the <title> text rather than the whole document.
    expected_title = fromstring(expected).xpath('//title')[0].text
    actual_title = fromstring(actual).xpath('//title')[0].text
    assert expected_title == actual_title
def load_valid_item():
    """Load the first exporter fixture, coercing its timestamp strings to datetimes."""
    raw = read_test_file_content('files/exporter_fixture.json')
    item = json.loads(raw)[0]
    # Both time fields arrive as strings in the JSON fixture.
    for field in ('start_time', 'end_time'):
        item[field] = _str_to_datetime(item[field])
    return item
def test_gen_requests():
    """_gen_requests should yield the OCD event detail URLs found in the feed fixture."""
    feed = json.loads(
        read_test_file_content('files/chi_citycouncil_feed.json'))
    # list() materializes the generator directly; the original
    # `[item for item in ...]` comprehension was a redundant copy idiom.
    event_requests = list(spider._gen_requests(feed))
    assert event_requests == [
        'https://ocd.datamade.us/ocd-event/86094f46-cf45-46f8-89e2-0bf783e7aa12/',
        'https://ocd.datamade.us/ocd-event/93d62d20-b1dc-4d71-9e96-60c99c837e90/',
    ]
def test_gen_html_content():
    """Replay the recorded HTTP fetch and check the rendered page's <title>.

    Generated files are removed in a ``finally`` block so a failing
    assertion no longer leaks them; the old ``if test_title ==
    rendered_title`` guard was redundant (the assert already guarantees
    equality) and skipped cleanup whenever the titles differed.
    """
    # The cassette only needs to cover the HTTP activity in _gen_html;
    # the fixture reads below are plain local-file I/O.
    with recorder.use_cassette('test_gen_html_content'):
        rendered_filenames = tasks._gen_html(
            SPIDER_NAME, SPIDER_START_URLS, session=session)
    try:
        test_file_content = read_test_file_content(
            'files/testspider_articles.html.example')
        rendered_content = read_test_file_content(
            'files/testspider_articles.html')
        test_title = fromstring(test_file_content).xpath('//title')[0].text
        rendered_title = fromstring(rendered_content).xpath('//title')[0].text
        assert test_title == rendered_title
    finally:
        # Always delete the generated files, pass or fail.
        for f in rendered_filenames:
            os.remove(f)
def test_render_spider():
    """Rendering the spider template should reproduce the example spider file."""
    expected = read_test_file_content('files/testspider.py.example')
    actual = tasks._render_content(
        'spider.tmpl',
        name=SPIDER_NAME,
        long_name=SPIDER_LONG_NAME,
        domains=SPIDER_DOMAINS,
        start_urls=SPIDER_START_URLS,
    )
    assert expected == actual
def test_render_spider():
    """Rendering the spider template should match the example file (modulo whitespace)."""
    expected = read_test_file_content('files/testspider.py.example')
    actual = tasks._render_content(
        'spider.tmpl',
        name=SPIDER_NAME,
        agency_id=SPIDER_AGENCY_ID,
        domains=SPIDER_DOMAINS,
        start_urls=SPIDER_START_URLS,
    )
    # strip() tolerates leading/trailing whitespace differences only.
    assert expected.strip() == actual.strip()
def test_gen_html_content(monkeypatch):
    """Generate HTML with a mocked fetch and check the page <title> against the fixture.

    Cleanup of the rendered files now happens in a ``finally`` block: the
    original only removed them after a passing assert, leaking the files
    on every test failure.
    """
    # Stub out the network call so _gen_html "fetches" the local fixture.
    patched_fetch_url = Mock()
    patched_fetch_url.return_value = file_response(
        'files/testspider_articles.html.example')
    monkeypatch.setattr(
        'scripts.generate_spider._fetch_url', patched_fetch_url)

    rendered_filenames = generate_spider._gen_html(
        SPIDER_NAME, [SPIDER_START_URLS[0]], session=session)
    try:
        test_file_content = read_test_file_content(
            'files/testspider_articles.html.example')
        rendered_content = read_test_file_content(
            'files/testspider_articles.html')
        test_title = fromstring(test_file_content).xpath('//title')[0].text
        rendered_title = fromstring(rendered_content).xpath('//title')[0].text
        assert test_title == rendered_title
    finally:
        # Always delete the generated files, pass or fail.
        for f in rendered_filenames:
            os.remove(f)
def test_render_test():
    """Rendering the test template at a frozen date should match the example file."""
    expected = read_test_file_content(
        'files/test_testspider.py.example')
    # Context-manager form guarantees time is unfrozen even if rendering
    # raises, unlike the manual freezer.start()/stop() pair it replaces.
    with freeze_time('2018-12-01'):
        rendered = generate_spider._render_content(
            'test.tmpl',
            name=SPIDER_NAME,
            domains=SPIDER_DOMAINS,
            start_urls=SPIDER_START_URLS)
    assert expected == rendered
def load_valid_item():
    """Load the first travis fixture, parsing its start date and time strings."""
    item = json.loads(
        read_test_file_content('files/travis_fixture.json'))[0]
    start = item['start']
    # The fixture stores date and time as separate strings under 'start'.
    start['date'] = _str_to_date(start['date'])
    start['time'] = _str_to_time(start['time'])
    return item
def test_render_spider():
    """Rendering the spider template should reproduce the example spider file."""
    expected = read_test_file_content('files/testspider.py.example')
    rendered = tasks._render_content(
        SPIDER_NAME, SPIDER_URL, 'spider.tmpl')
    assert expected == rendered
import json

from tests.utils import read_test_file_content
from documenters_aggregator.pipelines.TravisValidation import TravisValidationPipeline

# Module-level fixtures shared by every test below: the first record of
# the JSON fixture is treated as a known-valid item, and a single
# pipeline instance is reused across tests.
fixtures = json.loads(read_test_file_content('files/travis_fixture.json'))
valid_item = fixtures[0]
pipeline = TravisValidationPipeline()


def test_valid_process_item():
    """A valid item should pass every validation flag (all val_* keys == 1)."""
    processed = pipeline.process_item(valid_item, None)
    for k, v in processed.items():
        if k.startswith('val_'):
            assert v == 1


def test_invalid_required_value():
    """An empty required field should be flagged as invalid (val_id == 0)."""
    # Shallow copy is sufficient: only the top-level 'id' key is mutated.
    invalid_item = valid_item.copy()
    invalid_item['id'] = ''
    processed = pipeline.process_item(invalid_item, None)
    assert processed['val_id'] == 0


def test_invalid_type():
    """A wrongly-typed field (bool where a string is expected) should be flagged."""
    invalid_item = valid_item.copy()
    invalid_item['id'] = True
    processed = pipeline.process_item(invalid_item, None)
    assert processed['val_id'] == 0


def test_invalid_format():