def test_return_value(self):
    """scrape() should return the fully-qualified URL of the named resource."""
    disable_stdout()
    scraper = DataPressScraperStub('foo.bar/', 'X01000001', 'baz', 'csv', [], 'utf-8')
    result_url = scraper.scrape()
    enable_stdout()
    self.assertEqual("https://foo.bar/baz.csv", result_url)
def test_extra_fields(self):
    """Extra fields requested from the scraper should appear as columns
    in the resulting ``resources`` table.
    """
    disable_stdout()
    stub = DataPressScraperStub(
        'foo.bar/', 'X01000001', 'baz', 'csv', ['description'], 'utf-8')
    stub.scrape()
    enable_stdout()
    result = scraperwiki.sqlite.select(" * FROM resources;")
    # assertIn reports the actual row keys on failure, unlike assertTrue(x in y)
    self.assertIn("description", result[0])
def run_scraper(self):
    """Execute the GML scraper stub with stdout suppressed."""
    disable_stdout()
    # Map GML namespace-qualified element names onto output column names.
    field_map = {
        '{http://ogr.maptools.org/}NUM': 'NUM',
        '{http://ogr.maptools.org/}TEXT': 'TEXT',
        '{http://ogr.maptools.org/}OBJECTID': 'OBJECTID',
    }
    scraper = GmlScraperStub('foo.bar/baz', 'X01000001', 'foo', field_map, 'OBJECTID')
    scraper.scrape()
    enable_stdout()
def test_scraper(self):
    """scrape() should insert one row per resource, with the expected
    columns present (and no extra-field columns when none were requested).
    """
    disable_stdout()
    stub = DataPressScraperStub('foo.bar/', 'X01000001', 'baz', 'csv', [], 'utf-8')
    stub.scrape()
    enable_stdout()
    # should have inserted one row for each resource
    result = scraperwiki.sqlite.select(" * FROM resources;")
    self.assertEqual(2, len(result))
    # assertIn/assertNotIn report the actual row keys on failure,
    # unlike assertTrue/assertFalse on a bare membership expression
    self.assertIn("format", result[0])
    self.assertIn("url", result[0])
    self.assertNotIn("description", result[0])
def run_scraper(self):
    """Execute the GeoJSON scraper stub with stdout suppressed."""
    disable_stdout()
    scraper = GeoJsonScraperStub('foo.bar/baz', 'X01000001', 'utf-8', 'foo')
    scraper.scrape()
    enable_stdout()