def mmwr_task():
    """Instantiate a MortalityProcessor and execute its run() method."""
    MortalityProcessor().run()
def setUp(self):
    """Build a fresh processor for each test and switch on HTTP mocking."""
    # NOTE(review): this def appears to be an orphaned duplicate of
    # MMWRTest.setUp below — confirm whether it belongs to a class.
    processor = MortalityProcessor()
    self.processor = processor
    httpretty.enable()
class MMWRTest(TestCase):
    """
    Tests the dataqs.mmwr module. Since each processor is highly dependent
    on a running GeoNode instance for most functions, only independent
    functions are tested here.
    """

    def setUp(self):
        # Fresh processor per test; httpretty intercepts outbound HTTP.
        self.processor = MortalityProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def test_download(self):
        """
        Verify that a file is downloaded.
        """
        report_date = datetime.date(2016, 1, 15)
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url.format(week=2, year=2016),
            body=test_data())
        self.processor.generate_csv(report_date)
        output = os.path.join(
            self.processor.tmp_dir,
            '{}.txt'.format(self.processor.prefix))
        self.assertTrue(os.path.exists(output))
        with open(output) as ofile:
            # assertEqual: the assertEquals alias is deprecated.
            self.assertEqual(ofile.read(), test_data())

    def test_generate_csv(self):
        """
        Verify that a correct csv file is generated.
        """
        report_date = datetime.date(2016, 1, 15)
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url.format(week=2, year=2016),
            body=test_data())
        self.processor.generate_csv(report_date)
        output = os.path.join(
            self.processor.tmp_dir,
            '{}.csv'.format(self.processor.prefix))
        self.assertTrue(os.path.exists(output))
        with open(output) as ofile:
            reader = csv.reader(ofile)
            # next(reader) works on both Python 2 and 3;
            # reader.next() is Python-2-only.
            headers = next(reader)
            with open(os.path.join(script_dir,
                                   'resources/mmwr.json')) as locs:
                locations = json.load(locs)
            self.assertEqual(headers, [
                'place', 'lng', 'lat', 'all', 'a65', 'a45_64', 'a25_44',
                'a01-24', 'a01', 'flu', 'report_date'
            ])
            for row in reader:
                self.assertIn(row[0], locations)
                # locations presumably maps place -> [lat, lng] while the
                # csv stores lng before lat — confirm against mmwr.json.
                self.assertEqual(float(row[1]), locations[row[0]][1])
                self.assertEqual(float(row[2]), locations[row[0]][0])

    def test_cleanup(self):
        """
        Verify that cleanup() removes every temp file carrying the
        processor's prefix.
        """
        report_date = datetime.date(2016, 1, 15)
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url.format(week=2, year=2016),
            body=test_data())
        self.processor.generate_csv(report_date)
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        self.assertEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
# NOTE(review): this is a second definition of MMWRTest in the same file;
# it shadows the one above — confirm which copy should be kept.
class MMWRTest(TestCase):
    """
    Tests the dataqs.mmwr module. Since each processor is highly dependent
    on a running GeoNode instance for most functions, only independent
    functions are tested here.
    """

    def setUp(self):
        # Fresh processor per test; httpretty intercepts outbound HTTP.
        self.processor = MortalityProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def test_download(self):
        """
        Verify that a file is downloaded.
        """
        report_date = datetime.date(2016, 1, 15)
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url.format(week=2, year=2016),
            body=test_data())
        self.processor.generate_csv(report_date)
        output = os.path.join(
            self.processor.tmp_dir,
            '{}.txt'.format(self.processor.prefix))
        self.assertTrue(os.path.exists(output))
        with open(output) as ofile:
            # assertEqual: the assertEquals alias is deprecated.
            self.assertEqual(ofile.read(), test_data())

    def test_generate_csv(self):
        """
        Verify that a correct csv file is generated.
        """
        report_date = datetime.date(2016, 1, 15)
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url.format(week=2, year=2016),
            body=test_data())
        self.processor.generate_csv(report_date)
        output = os.path.join(
            self.processor.tmp_dir,
            '{}.csv'.format(self.processor.prefix))
        self.assertTrue(os.path.exists(output))
        with open(output) as ofile:
            reader = csv.reader(ofile)
            # next(reader) works on both Python 2 and 3;
            # reader.next() is Python-2-only.
            headers = next(reader)
            with open(os.path.join(script_dir,
                                   'resources/mmwr.json')) as locs:
                locations = json.load(locs)
            self.assertEqual(
                headers,
                ['place', 'lng', 'lat', 'all', 'a65', 'a45_64', 'a25_44',
                 'a01-24', 'a01', 'flu', 'report_date'])
            for row in reader:
                self.assertIn(row[0], locations)
                # locations presumably maps place -> [lat, lng] while the
                # csv stores lng before lat — confirm against mmwr.json.
                self.assertEqual(float(row[1]), locations[row[0]][1])
                self.assertEqual(float(row[2]), locations[row[0]][0])

    def test_cleanup(self):
        """
        Verify that cleanup() removes every temp file carrying the
        processor's prefix.
        """
        report_date = datetime.date(2016, 1, 15)
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url.format(week=2, year=2016),
            body=test_data())
        self.processor.generate_csv(report_date)
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        self.assertEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))