def test_get_load_historical_and_forecast_date_range_returns_expected(self, mocked_request):
    """A range spanning past and future combines historical and forecast load rows."""
    hours = 12
    start_at = self.tzaware_utcnow - timedelta(hours=hours)
    end_at = self.tzaware_utcnow + timedelta(hours=hours)

    historical_url = 'http://www.nspower.ca/system_report/today/currentload.json'
    historical_body = read_fixture(self.c.__module__, 'currentload.json')
    mocked_request.get(historical_url, content=historical_body.encode('utf-8'))

    forecast_url = 'http://www.nspower.ca/system_report/today/forecast.json'
    forecast_body = read_fixture(self.c.__module__, 'forecast.json')
    mocked_request.get(forecast_url, content=forecast_body.encode('utf-8'))

    results = self.c.get_load(start_at=start_at, end_at=end_at)

    self.assertEqual(len(results), hours * 2)
    # Spot check values at the start and end of the results
    self.assertEqual(results[0]['timestamp'], Timestamp('2017-10-05T00:00:00.000Z'))
    self.assertAlmostEqual(results[0]['load_MW'], 877.25)
    self.assertEqual(results[23]['timestamp'], Timestamp('2017-10-05T23:00:00.000Z'))
    self.assertAlmostEqual(results[23]['load_MW'], 1020)
def test_get_load_dange_range_with_latest_and_forecast(self, expected_requests):
    """A range covering now and the future mixes the latest report with forecast rows."""
    self.nbpower_client = client_factory('NBP')
    exp_forect_url = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/2017-07-16%2022.csv'
    mocked_csv = read_fixture('nbpower', '2017-07-16 22.csv').encode('utf8')
    mocked_html = read_fixture('nbpower', 'SystemInformation_realtime.html').encode('utf8')
    expected_requests.get(self.nbpower_client.LATEST_REPORT_URL, content=mocked_html)
    expected_requests.get(exp_forect_url, content=mocked_csv)

    load_ts = self.nbpower_client.get_load(start_at=parse('2017-07-16T00:00:00-03:00'),
                                           end_at=parse('2017-07-17T01:00:00-03:00'))

    self.assertEqual(len(load_ts), 4)  # latest + 3 forecasts.
    # The first forecast is in the past, so the first timestamp should be from the latest report.
    expected_rows = [
        (Timestamp('2017-07-17T01:57:29.000Z'), 1150),
        (Timestamp('2017-07-17T02:00:00.000Z'), 1160),
        (Timestamp('2017-07-17T03:00:00.000Z'), 1129),
        (Timestamp('2017-07-17T04:00:00.000Z'), 1089),
    ]
    for row, (ts, mw) in zip(load_ts, expected_rows):
        self.assertEqual(row.get('timestamp', None), ts)
        self.assertEqual(row.get('load_MW', None), mw)
def test_get_generation_latest_returns_expected(self, mocked_request):
    """Latest generation request returns one row per fuel with known MW values."""
    expected_url = 'http://www.nspower.ca/system_report/today/currentmix.json'
    body = read_fixture(self.c.__module__, 'currentmix.json')
    mocked_request.get(expected_url, content=body.encode('utf-8'))

    results = self.c.get_generation(latest=True)

    self.assertEqual(len(results), 8)  # 8 fuels
    # Check that all datetime values are equal and known fuel values
    expected_datetime = parse('2017-10-05T11:00:00.000Z')
    expected_mw_by_fuel = {'coal': 44.91, 'dual': 14.13, 'oil': 0.02, 'ccgt': 0,
                           'biomass': 2.44, 'hydro': 2.74, 'wind': 35.52, 'other': 0.25}
    for result in results:
        self.assertEqual(result['timestamp'], expected_datetime)
        self.assertAlmostEqual(result['gen_MW'],
                               expected_mw_by_fuel.get(result['fuel_name'], -1))
def test_get_generation_latest_returns_expected(self, mocked_request):
    """Latest generation request returns one row per fuel with known MW values."""
    expected_url = 'http://www.nspower.ca/system_report/today/currentmix.json'
    mocked_request.get(
        expected_url,
        content=read_fixture(self.c.__module__, 'currentmix.json').encode('utf-8'))

    results = self.c.get_generation(latest=True)

    self.assertEqual(len(results), 8)  # 8 fuels
    # Check that all datetime values are equal and known fuel values
    expected_timestamp = Timestamp('2017-10-05T11:00:00.000Z')
    expected_mw_by_fuel = {'coal': 44.91, 'dual': 14.13, 'oil': 0.02, 'ccgt': 0,
                           'biomass': 2.44, 'hydro': 2.74, 'wind': 35.52, 'other': 0.25}
    for result in results:
        self.assertEqual(result['timestamp'], expected_timestamp)
        self.assertAlmostEqual(result['gen_MW'],
                               expected_mw_by_fuel.get(result['fuel_name'], -1))
def test_get_load_forecast_dst_start(self, expected_requests):
    """Forecast spanning the spring-forward hour drops the duplicated local hour."""
    self.nbpower_client = client_factory('NBP')
    forecast_url = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/2017-03-12%2000.csv'
    expected_requests.get(forecast_url,
                          content=read_fixture('nbpower', '2017-03-12 00.csv').encode('utf8'))

    load_ts = self.nbpower_client.get_load(start_at=parse('2017-03-12T00:00:00-04:00'),
                                           end_at=parse('2017-03-12T04:00:00-03:00'))

    # 4 forecasts - 1 for quirky duplicate 2017-03-12T03:00-04:00 overwrite
    self.assertEqual(len(load_ts), 3)
    self.assertEqual(load_ts[0].get('timestamp', None), Timestamp('2017-03-12T04:00:00.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 2020)
    self.assertEqual(load_ts[1].get('timestamp', None), Timestamp('2017-03-12T05:00:00.000Z'))
    self.assertEqual(load_ts[1].get('load_MW', None), 1982)
    self.assertEqual(load_ts[2].get('timestamp', None), Timestamp('2017-03-12T06:00:00.000Z'))
    # second T03:00-03:00 load value overwrites first
    self.assertEqual(load_ts[2].get('load_MW', None), 1974)
def test_get_generation_success(self, mock_request):
    """Latest generation returns oil/other/wind rows sharing a single timestamp."""
    mock_request.get('http://www.gov.pe.ca/windenergy/chart-values.php',
                     content=read_fixture(self.c.NAME, 'chart-values.json').encode('utf8'))
    expected_timestamp = datetime(year=2017, month=9, day=25, hour=10, minute=1,
                                  second=1, microsecond=0, tzinfo=pytz.utc)

    load_ts = self.c.get_generation(latest=True)

    self.assertEqual(len(load_ts), 3)
    expected_rows = [('oil', 0), ('other', 146.01), ('wind', 4.55)]
    for row, (fuel, mw) in zip(load_ts, expected_rows):
        self.assertEqual(row.get('timestamp', None), expected_timestamp)
        self.assertEqual(row.get('fuel_name', None), fuel)
        self.assertEqual(row.get('gen_MW', None), mw)
def test_get_load_forecast_standard_time_start(self, expected_requests):
    """Forecast spanning the fall-back hour leaves a gap for the skipped CSV row."""
    self.nbpower_client = client_factory('NBP')
    forecast_url = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/2017-11-05%2000.csv'
    expected_requests.get(forecast_url,
                          content=read_fixture('nbpower', '2017-11-05 00.csv').encode('utf8'))

    load_ts = self.nbpower_client.get_load(start_at=parse('2017-11-05T00:00:00-03:00'),
                                           end_at=parse('2017-11-05T03:00:00-04:00'))

    self.assertEqual(len(load_ts), 4)  # 4 hours of load forecasts
    self.assertEqual(load_ts[0].get('timestamp', None), Timestamp('2017-11-05T03:00:00.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 1293)
    self.assertEqual(load_ts[1].get('timestamp', None), Timestamp('2017-11-05T04:00:00.000Z'))
    self.assertEqual(load_ts[1].get('load_MW', None), 1266)
    self.assertEqual(load_ts[2].get('timestamp', None), Timestamp('2017-11-05T05:00:00.000Z'))
    self.assertEqual(load_ts[2].get('load_MW', None), 1261)
    # CSV skips time 20171105020000AS (i.e. 2017-11-05T02:00:00-04:00)
    self.assertEqual(load_ts[3].get('timestamp', None), Timestamp('2017-11-05T07:00:00.000Z'))
    self.assertEqual(load_ts[3].get('load_MW', None), 1262)
def test_get_generation_valid_date_range_during_dst_returns_expected(self, mocked_request):
    """A 12-hour range yields hourly hydro/thermal rows from the frozen fixture."""
    frozen_client = client_factory('YUKON')
    frozen_utcnow = datetime.utcnow().replace(tzinfo=pytz.utc)
    end_at = frozen_utcnow
    start_at = frozen_utcnow - timedelta(hours=12)
    page = read_fixture(frozen_client.__module__, 'hourly_2017-10-11.html')
    mocked_request.get('http://www.yukonenergy.ca/consumption/chart.php?chart=hourly',
                       content=page.encode('utf-8'))

    results = frozen_client.get_generation(start_at=start_at, end_at=end_at)

    # 2 fuels * 11 hours (1 hour is missing in this particular response)
    self.assertEqual(len(results), 22)
    # Spot check values at the start and end of the results
    self.assertEqual(results[0]['timestamp'], Timestamp('2017-10-10T23:00:00Z'))
    self.assertEqual(results[0]['fuel_name'], 'hydro')
    self.assertAlmostEqual(results[0]['gen_MW'], 51.36)
    self.assertEqual(results[21]['timestamp'], Timestamp('2017-10-11T09:00:00Z'))
    self.assertEqual(results[21]['fuel_name'], 'thermal')
    self.assertAlmostEqual(results[21]['gen_MW'], 0)
def test_build_jats(self):
    """Check building JATS XML content from a DOCX file."""
    expected_content = read_fixture("jats_content_99999.txt").decode("utf-8")
    jats_content = jats.build_jats(data_path("DIGEST 99999.docx"))
    self.assertEqual(jats_content, expected_content)
def test_get_generation_dst_start(self, mock_request):
    """Generation on the DST-start day maps local hours to expected UTC timestamps."""
    expected_url = 'http://content.caiso.com/green/renewrpt/20170312_DailyRenewablesWatch.txt'
    mock_request.get(
        expected_url,
        content=read_fixture(self.c.__module__,
                             '20170312_DailyRenewablesWatch.txt').encode('utf-8'))

    generation = self.c.get_generation(
        start_at=parse('2017-03-12T00:00:00-08:00'),
        end_at=parse('2017-03-12T23:59:59-07:00'),
        # FIXME: Non-base kwargs are required to route to _generation_historical()
        market=self.c.MARKET_CHOICES.hourly,
        freq=self.c.FREQUENCY_CHOICES.hourly)

    self.assertEqual(generation[0]['timestamp'],
                     Timestamp('2017-03-12T08:00:00Z'))  # '2017-03-12T00:00:00-08:00'
    self.assertEqual(generation[9]['timestamp'],
                     Timestamp('2017-03-12T09:00:00Z'))  # '2017-03-12T01:00:00-08:00'
    self.assertEqual(generation[18]['timestamp'],
                     Timestamp('2017-03-12T10:00:00Z'))  # '2017-03-12T03:00:00-07:00'
    self.assertEqual(generation[229]['timestamp'],
                     Timestamp('2017-03-13T06:00:00Z'))  # '2017-03-12T23:00:00-07:00'
def test_get_generation_dst_end(self, mock_request):
    """Generation on the fall-back day: 25 local hours, with a gap where 'Hour 1' repeats."""
    expected_url = 'http://content.caiso.com/green/renewrpt/20171105_DailyRenewablesWatch.txt'
    expected_response = read_fixture(
        self.c.__module__, '20171105_DailyRenewablesWatch.txt').encode('utf-8')
    mock_request.get(expected_url, content=expected_response)
    start_at = parse('2017-11-05T00:00:00-07:00')
    end_at = parse('2017-11-05T23:59:59-08:00')
    generation = self.c.get_generation(
        start_at=start_at, end_at=end_at,
        # FIXME: Non-base kwargs are required to route to _generation_historical()
        market=self.c.MARKET_CHOICES.hourly,
        freq=self.c.FREQUENCY_CHOICES.hourly)
    self.assertEqual(generation[0]['timestamp'],
                     Timestamp('2017-11-05T07:00:00Z'))  # '2017-11-05T00:00:00-07:00'
    self.assertEqual(generation[9]['timestamp'],
                     Timestamp('2017-11-05T08:00:00Z'))  # '2017-11-05T01:00:00-07:00'
    # "Hour 1" appears only once in 25 hour day. The 01:00-08:00 hour is skipped,
    # leaving a gap in UTC timeseries.
    # BUG FIX: expected value was written 'T010:00:00Z' (three-digit hour), which is
    # not a valid ISO 8601 time; per the comment it must be 10:00 UTC.
    self.assertEqual(generation[18]['timestamp'],
                     Timestamp('2017-11-05T10:00:00Z'))  # '2017-11-05T02:00:00-08:00'
    self.assertEqual(generation[239]['timestamp'],
                     Timestamp('2017-11-06T07:00:00Z'))  # '2017-11-05T23:00:00-08:00'
def test_with_multiple_detectors(self):
    """Running several detectors joined by '+' still finds features in the image."""
    image_processor = ImageProcessor()
    fixture_bytes = read_fixture('one_face.gif')
    detect = image_processor.detect('face+profile+glass', fixture_bytes)
    expect(detect).Not.to_be_empty()
def test_initialize_mode_production(self):
    """Production mode fetches public keys and 1MB round-trips through both keys."""
    httpretty.register_uri(httpretty.GET, URI_PRODUCTION,
                           body=read_fixture("public_keys.json"))
    enc = Encrypt.new(mode=MODE_PRODUCTION)
    self.assertEqual(
        "ca5af2d14bee923a0a0d1687b7c77e7211a57f84:::683150ee69b4d906aa883d0ac12b0fdd79f95bcf",
        enc.fingerprint,
    )
    data = get_random_bytes(1024 * 1024)  # 1MB
    encrypted = enc.encrypt(data)
    private_keys = json.loads(read_fixture("private_keys.json"))
    # Either recipient's private key must recover the plaintext from its slot.
    for slot, key_name in enumerate(("private_key_a", "private_key_b")):
        self.assertEqual(data, decrypt(encrypted, private_keys[key_name], slot))
def test_mode_production(self):
    """CLI in production mode encrypts stdin data decryptable by both private keys."""
    private_keys = json.loads(read_fixture("private_keys.json"))
    httpretty.register_uri(httpretty.GET, URI_PRODUCTION,
                           body=read_fixture("public_keys.json"))
    input_data = "test data".encode("utf-8")

    exitcode, out_data, err_data = run_main(args="--mode production".split(" "),
                                            in_data=input_data)

    self.assertEqual("", err_data)
    self.assertEqual(0, exitcode)
    for slot, key_name in enumerate(("private_key_a", "private_key_b")):
        self.assertEqual(input_data, decrypt(out_data, private_keys[key_name], slot))
def test_initialize_custom_uri(self):
    """Encrypt.new with a custom URI fetches keys there and 1MB round-trips."""
    uri = "https://api.example.com/keys"
    httpretty.register_uri(httpretty.GET, uri, body=read_fixture("public_keys.json"))
    enc = Encrypt.new(uri=uri)
    self.assertEqual(
        "ca5af2d14bee923a0a0d1687b7c77e7211a57f84:::683150ee69b4d906aa883d0ac12b0fdd79f95bcf",
        enc.fingerprint,
    )
    data = get_random_bytes(1024 * 1024)  # 1MB
    encrypted = enc.encrypt(data)
    private_keys = json.loads(read_fixture("private_keys.json"))
    for slot, key_name in enumerate(("private_key_a", "private_key_b")):
        self.assertEqual(data, decrypt(encrypted, private_keys[key_name], slot))
def test_build_to_html(self):
    """Test building from a DOCX file and converting to HTML."""
    docx_file = "DIGEST 99999.docx"
    expected_title = u"Fishing for errors in the tests"
    expected_summary = read_fixture("html_content_99999_summary.txt").decode("utf-8")
    expected_texts = [
        read_fixture("html_content_99999_text_1.txt").decode("utf-8"),
        read_fixture("html_content_99999_text_2.txt").decode("utf-8"),
        read_fixture("html_content_99999_text_3.txt").decode("utf-8"),
    ]
    # build the digest object
    digest = build.build_digest(data_path(docx_file))
    # test assertions
    self.assertEqual(html.string_to_html(digest.title), expected_title)
    self.assertEqual(html.string_to_html(digest.summary), expected_summary)
    for i, expected_text in enumerate(expected_texts):
        self.assertEqual(html.string_to_html(digest.text[i]), expected_text)
def test_get_load_success(self, mock_request):
    """Latest load returns a single row with the fixture's timestamp and MW."""
    mock_request.get('http://www.gov.pe.ca/windenergy/chart-values.php',
                     content=read_fixture(self.c.NAME, 'chart-values.json').encode('utf8'))
    load_ts = self.c.get_load(latest=True)
    self.assertEqual(len(load_ts), 1)
    self.assertEqual(load_ts[0].get('timestamp', None),
                     Timestamp('2017-09-25T10:01:01.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 150.56)
def test_custom_uri(self):
    """CLI with --uri fetches keys from the given endpoint and encrypts correctly."""
    uri = "https://example.com/keys"
    private_keys = json.loads(read_fixture("private_keys.json"))
    httpretty.register_uri(httpretty.GET, uri, body=read_fixture("public_keys.json"))
    input_data = "test data".encode("utf-8")

    exitcode, out_data, err_data = run_main(args=f"--uri {uri}".split(" "),
                                            in_data=input_data)

    self.assertEqual("", err_data)
    self.assertEqual(0, exitcode)
    for slot, key_name in enumerate(("private_key_a", "private_key_b")):
        self.assertEqual(input_data, decrypt(out_data, private_keys[key_name], slot))
def test_get_load_latest(self, mocked_requests):
    """Latest load comes from the realtime system information report."""
    # (param renamed from misspelled 'exptected_requests'; injected positionally)
    mocked_html = read_fixture('nbpower', 'SystemInformation_realtime.html').encode('utf8')
    mocked_requests.get(self.nbpower_client.LATEST_REPORT_URL, content=mocked_html)
    load_ts = self.nbpower_client.get_load(latest=True)
    self.assertEqual(len(load_ts), 1)
    self.assertEqual(load_ts[0].get('timestamp', None),
                     Timestamp('2017-07-17T01:57:29.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 1150)
def test_get_load_latest(self, mocked_request):
    """Latest load is scraped from the system information center page."""
    page_html = read_fixture(self.c.__module__, 'system-information-center.html')
    mocked_request.get(self.c.SYSTEM_INFO_URL, content=page_html.encode('utf-8'))
    load_ts = self.c.get_load(latest=True)
    self.assertEqual(len(load_ts), 1)
    self.assertEqual(load_ts[0].get('timestamp', None), Timestamp('2017-10-20T01:45:00Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 773)
def test_build_medium_content(self):
    """Test building from a DOCX file and converting to Medium content."""
    expected_medium_content = read_fixture("medium_content_99999.py")
    # build the digest object
    medium_content = medium_post.build_medium_content(
        data_path("DIGEST 99999.docx"), "tmp", self.digest_config)
    # test assertions
    self.assertEqual(medium_content, expected_medium_content)
def test_parse_load_rtm(self):
    """Real-time load CSV parses to a timestamp-indexed frame of plausible MW values."""
    self.c.options = {'data': 'dummy'}
    load_csv = read_fixture(ba_name='nyiso', filename='20171122pal.csv')
    df = self.c.parse_load_rtm(load_csv.encode('utf-8'))
    valid_dates = [date(2017, 11, 22), date(2017, 11, 23)]
    for idx, row in df.iterrows():
        self.assertIn(idx.date(), valid_dates)
        self.assertGreater(row['load_MW'], 13000)
        self.assertLess(row['load_MW'], 22000)
    self.assertEqual(df.index.name, 'timestamp')
def test_get_load_latest_returns_expected(self, mocked_request):
    """Latest load returns a single row matching the currentload fixture."""
    expected_url = 'http://www.nspower.ca/system_report/today/currentload.json'
    body = read_fixture(self.c.__module__, 'currentload.json')
    mocked_request.get(expected_url, content=body.encode('utf-8'))
    results = self.c.get_load(latest=True)
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0]['timestamp'], Timestamp('2017-10-05T11:00:00.000Z'))
    self.assertAlmostEqual(results[0]['load_MW'], 892.64)
def test_parse_genmix(self):
    """Real-time fuel mix CSV parses into per-fuel MW rows indexed by timestamp."""
    self.c.options = {'data': 'dummy'}
    fuelmix_csv = read_fixture(ba_name='nyiso', filename='20171122rtfuelmix.csv')
    df = self.c.parse_genmix(fuelmix_csv.encode('utf-8'))
    valid_dates = [date(2017, 11, 22), date(2017, 11, 23)]
    for idx, row in df.iterrows():
        self.assertIn(idx.date(), valid_dates)
        self.assertLess(row['gen_MW'], 5500)
        self.assertIn(row['fuel_name'], self.c.fuel_names.values())
    self.assertEqual(df.index.name, 'timestamp')
def test_parse_trade(self):
    """External limits/flows CSV parses into net export MW within expected bounds."""
    self.c.options = {'data': 'dummy'}
    flows_csv = read_fixture(ba_name='nyiso', filename='20171122ExternalLimitsFlows.csv')
    df = self.c.parse_trade(flows_csv.encode('utf-8'))
    valid_dates = [date(2017, 11, 22), date(2017, 11, 23)]
    for idx, row in df.iterrows():
        self.assertIn(idx.date(), valid_dates)
        self.assertLess(row['net_exp_MW'], -1400)
        self.assertGreater(row['net_exp_MW'], -6300)
    self.assertEqual(df.index.name, 'timestamp')
def test_get_load_latest_returns_expected(self, mocked_request):
    """Latest load returns a single row matching the currentload fixture."""
    expected_url = 'http://www.nspower.ca/system_report/today/currentload.json'
    mocked_request.get(
        expected_url,
        content=read_fixture(self.c.__module__, 'currentload.json').encode('utf-8'))
    results = self.c.get_load(latest=True)
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0]['timestamp'], parse('2017-10-05T11:00:00.000Z'))
    self.assertAlmostEqual(results[0]['load_MW'], 892.64)
def test_input_file(self):
    """CLI --input reads a file and the output decrypts with both private keys."""
    private_keys = json.loads(read_fixture("private_keys.json"))
    httpretty.register_uri(httpretty.GET, URI_PRODUCTION,
                           body=read_fixture("public_keys.json"))
    in_path = f"{TESTS_ROOT}/fixtures/test_file.txt"
    with open(in_path, mode="rb") as f:
        in_file_data = f.read()

    exitcode, out_data, err_data = run_main(
        args=f"--mode production --input {in_path}".split(" "))

    self.assertEqual("", err_data)
    self.assertEqual(0, exitcode)
    for slot, key_name in enumerate(("private_key_a", "private_key_b")):
        self.assertEqual(in_file_data, decrypt(out_data, private_keys[key_name], slot))
def test_build_medium_content_with_jats(self):
    """Test building from a zip file and converting to Medium content."""
    jats_file = fixture_file("elife-99999-v0.xml")
    expected_medium_content = read_fixture("medium_content_jats_99999.py")
    # build the digest object
    medium_content = medium_post.build_medium_content(
        data_path("DIGEST 99999.zip"), "tmp", self.digest_config, jats_file)
    # test assertions
    self.assertEqual(medium_content, expected_medium_content)
def test_build_digest(self, test_data): "check building a digest object from a DOCX file" # note: below after 'the' is a unicode non-breaking space character expected_author = u"Anonymous" expected_title = u"Fishing for errors in the\xa0tests" expected_summary = ( u"Testing a document which mimics the format of a file we’ve used " + "before plus CO<sub>2</sub> and Ca<sup>2+</sup>.") expected_keywords = ["Face Recognition", "Neuroscience", "Vision"] expected_doi = u"https://doi.org/10.7554/eLife.99999" expected_text_len = 3 expected_text_0 = read_fixture( "digest_content_99999_text_1.txt").decode("utf-8") expected_text_1 = read_fixture( "digest_content_99999_text_2.txt").decode("utf-8") expected_text_2 = read_fixture( "digest_content_99999_text_3.txt").decode("utf-8") expected_image_caption = ( u"<b>It’s not just mammals who can recognise sample data.</b>" + u"\xa0Image credit:\xa0Anonymous and Anonymous\xa0(CC BY\xa04.0)") # build now digest_config = parse_raw_config( raw_config(test_data.get("config_section"))) digest = build.build_digest(data_path(test_data.get("file_name")), "tmp", digest_config) # assert assertions self.assertIsNotNone(digest) self.assertEqual(digest.author, expected_author) self.assertEqual(digest.title, expected_title) self.assertEqual(digest.summary, expected_summary) self.assertEqual(digest.keywords, expected_keywords) self.assertEqual(digest.doi, expected_doi) self.assertEqual(len(digest.text), expected_text_len) self.assertEqual(digest.text[0], expected_text_0) self.assertEqual(digest.text[1], expected_text_1) self.assertEqual(digest.text[2], expected_text_2) if digest.image: self.assertEqual(digest.image.caption, expected_image_caption) if test_data.get("image_file"): expected_image_file = os.path.join("tmp", test_data.get("image_file")) self.assertEqual(digest.image.file, expected_image_file)
def test_get_load_latest_returns_expected(self, mocked_request):
    """Latest load is parsed from the current consumption chart page."""
    frozen_client = client_factory('YUKON')
    expected_url = 'http://www.yukonenergy.ca/consumption/chart_current.php?chart=current'
    page = read_fixture(frozen_client.__module__, 'current_2017-10-11.html')
    mocked_request.get(expected_url, content=page.encode('utf-8'))
    results = frozen_client.get_load(latest=True)
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0]['timestamp'], Timestamp('2017-10-11T10:40:00.000Z'))
    self.assertAlmostEqual(results[0]['load_MW'], 38.74)
def test_no_input(self):
    """Missing both --input and piped data exits 2 with an error message."""
    httpretty.register_uri(httpretty.GET, URI_PRODUCTION,
                           body=read_fixture("public_keys.json"))
    exitcode, out_data, err_data = run_main(args="--mode production".split(" "))
    self.assertEqual("input file or pipe data must be provided\n", err_data)
    self.assertEqual(2, exitcode)
    self.assertEqual(b"", out_data)
def test_parse_load_forecast(self):
    """Load forecast CSV parses into six days of hourly MW values in a sane range."""
    self.c.options = {'data': 'dummy'}
    forecast_csv = read_fixture(ba_name='nyiso', filename='20171122isolf.csv')
    data = self.c.parse_load_forecast(forecast_csv.encode('utf-8'))
    for idx, row in data.iterrows():
        self.assertGreaterEqual(idx.date(), date(2017, 11, 22))
        self.assertLessEqual(idx.date(), date(2017, 11, 28))
        self.assertGreater(row['load_MW'], 12000)
        self.assertLess(row['load_MW'], 22000)
    # should have 6 days of hourly data
    self.assertEqual(len(data), 24 * 6)
def test_command():
    """The mapping-sheet command renders the person-statement schema as CSV."""
    input_filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                  '..', 'fixtures', 'schema', 'person-statement.json')
    with patch('sys.stdout', new_callable=StringIO) as actual:
        mapping_sheet = bodskit.mapping_sheet.MappingSheet(input_filename=input_filename)
        mapping_sheet.handle()
    expected = read_fixture('mapping-sheet-person-statement.csv').replace('\n', '\r\n')  # not sure why
    assert actual.getvalue() == expected
def test_parse_trade(self):
    """External limits/flows CSV parses into net export MW within expected bounds."""
    self.c.options = {'data': 'dummy'}
    csv_text = read_fixture(ba_name='nyiso',
                            filename='20171122ExternalLimitsFlows.csv')
    df = self.c.parse_trade(csv_text.encode('utf-8'))
    for idx, row in df.iterrows():
        self.assertIn(idx.date(), [date(2017, 11, 22), date(2017, 11, 23)])
        self.assertLess(row['net_exp_MW'], -1400)
        self.assertGreater(row['net_exp_MW'], -6300)
    self.assertEqual(df.index.name, 'timestamp')
def test_get_load_historical_and_forecast_date_range_returns_expected(self, mocked_request):
    """A range spanning past and future combines historical and forecast load rows."""
    hours = 12
    start_at = self.tzaware_utcnow - timedelta(hours=hours)
    end_at = self.tzaware_utcnow + timedelta(hours=hours)
    fixtures = [
        ('http://www.nspower.ca/system_report/today/currentload.json', 'currentload.json'),
        ('http://www.nspower.ca/system_report/today/forecast.json', 'forecast.json'),
    ]
    for url, fixture_name in fixtures:
        body = read_fixture(self.c.__module__, fixture_name)
        mocked_request.get(url, content=body.encode('utf-8'))

    results = self.c.get_load(start_at=start_at, end_at=end_at)

    self.assertEqual(len(results), hours * 2)
    # Spot check values at the start and end of the results
    self.assertEqual(results[0]['timestamp'], Timestamp('2017-10-05T00:00:00.000Z'))
    self.assertAlmostEqual(results[0]['load_MW'], 877.25)
    self.assertEqual(results[23]['timestamp'], Timestamp('2017-10-05T23:00:00.000Z'))
    self.assertAlmostEqual(results[23]['load_MW'], 1020)
def test_get_load_latest(self, mocked_requests):
    """Latest load comes from the realtime system information report."""
    # (param renamed from misspelled 'exptected_requests'; injected positionally)
    report_html = read_fixture('nbpower',
                               'SystemInformation_realtime.html').encode('utf8')
    mocked_requests.get(self.nbpower_client.LATEST_REPORT_URL, content=report_html)
    load_ts = self.nbpower_client.get_load(latest=True)
    self.assertEqual(len(load_ts), 1)
    self.assertEqual(load_ts[0].get('timestamp', None),
                     Timestamp('2017-07-17T01:57:29.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 1150)
def test_get_load_dange_range_without_forecast(self, expected_requests):
    """A purely historical range returns only the single latest-report value."""
    mocked_html = read_fixture('nbpower', 'SystemInformation_realtime.html').encode('utf8')
    expected_requests.get(self.nbpower_client.LATEST_REPORT_URL, content=mocked_html)
    start_at = parse('2017-07-16T21:00:00-03:00')
    # End time is the same as freeze_time (i.e. end_at = "now").
    end_at = parse('2017-07-16T22:58:00-03:00')
    load_ts = self.nbpower_client.get_load(start_at=start_at, end_at=end_at)
    self.assertEqual(len(load_ts), 1)
    self.assertEqual(load_ts[0].get('timestamp', None),
                     Timestamp('2017-07-17T01:57:29.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 1150)
def test_build_medium_content_with_jats_and_image(self):
    """Test building from a DOCX file and converting to Medium content."""
    jats_file = fixture_file("elife-99999-v0.xml")
    expected_medium_content = read_fixture("medium_content_jats_99999.py")
    # build the digest object
    medium_content = medium_post.build_medium_content(
        data_path("DIGEST 99999.docx"), "tmp", self.digest_config,
        jats_file, "IMAGE 99999.jpeg")
    # test assertions
    self.assertEqual(medium_content, expected_medium_content)
def test_enum_in_list():
    """Special test for problem reported with sourceType checklist - values where not being reported"""
    input_filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                  '..', 'fixtures', 'mapping-sheet-enum-in-list-input.json')
    with patch('sys.stdout', new_callable=StringIO) as actual:
        mapping_sheet = bodskit.mapping_sheet.MappingSheet(input_filename=input_filename)
        mapping_sheet.handle()
    expected = read_fixture('mapping-sheet-enum-in-list-output.csv').replace('\n', '\r\n')  # not sure why
    assert actual.getvalue() == expected
def test_get_generation_standard_time(self, mock_request):
    """A standard-time day maps local hours directly onto UTC timestamps."""
    expected_url = 'http://content.caiso.com/green/renewrpt/20171106_DailyRenewablesWatch.txt'
    mock_request.get(
        expected_url,
        content=read_fixture(self.c.__module__,
                             '20171106_DailyRenewablesWatch.txt').encode('utf-8'))
    generation = self.c.get_generation(
        start_at=parse('2017-11-06T00:00:00-08:00'),
        end_at=parse('2017-11-06T23:59:59-08:00'),
        # FIXME: Non-base kwargs are required to route to _generation_historical()
        market=self.c.MARKET_CHOICES.hourly,
        freq=self.c.FREQUENCY_CHOICES.hourly)
    self.assertEqual(generation[0]['timestamp'],
                     Timestamp('2017-11-06T08:00:00Z'))  # '2017-11-06T00:00:00-08:00'
    self.assertEqual(generation[239]['timestamp'],
                     Timestamp('2017-11-07T07:00:00Z'))  # '2017-11-06T23:00:00-08:00'
def test_parse_legacy_genmix(self):
    """
    Tests that legacy generation mix data format can still be parsed if
    someone requests a historical time range.
    """
    self.c.options = {'data': 'dummy'}
    legacy_csv = read_fixture(ba_name='nyiso', filename='20160119rtfuelmix.csv')
    df = self.c.parse_genmix(legacy_csv.encode('utf-8'))
    valid_dates = [date(2016, 1, 19), date(2016, 1, 20)]
    for idx, row in df.iterrows():
        self.assertIn(idx.date(), valid_dates)
        self.assertLess(row['gen_MW'], 5500)
        self.assertIn(row['fuel_name'], self.c.fuel_names.values())
    self.assertEqual(df.index.name, 'timestamp')
def setUp(self):
    """Create a CAISO client and preload all response fixtures used by the tests."""
    self.c = client_factory('CAISO')
    module = self.c.__module__
    self.ren_report_tsv = read_fixture(module, 'ren_report.csv')
    self.sld_fcst_xml = read_fixture(module, 'sld_forecast.xml')
    self.ene_slrs_xml = read_fixture(module, 'ene_slrs.xml')
    self.sld_ren_fcst_xml = read_fixture(module, 'sld_ren_forecast.xml')
    self.systemconditions_html = read_fixture(module, 'systemconditions.html')
    self.todays_outlook_renewables = read_fixture(module, 'todays_outlook_renewables.html')
def test_get_load_dange_range_with_latest_and_forecast(self, expected_requests):
    """A range covering now and the future mixes the latest report with forecasts."""
    self.nbpower_client = client_factory('NBP')
    forecast_url = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/2017-07-16%2022.csv'
    forecast_csv = read_fixture('nbpower', '2017-07-16 22.csv').encode('utf8')
    report_html = read_fixture('nbpower', 'SystemInformation_realtime.html').encode('utf8')
    expected_requests.get(self.nbpower_client.LATEST_REPORT_URL, content=report_html)
    expected_requests.get(forecast_url, content=forecast_csv)

    load_ts = self.nbpower_client.get_load(start_at=parse('2017-07-16T00:00:00-03:00'),
                                           end_at=parse('2017-07-17T01:00:00-03:00'))

    self.assertEqual(len(load_ts), 4)  # latest + 3 forecasts.
    # The first forecast is in the past, so the first timestamp should be from the latest report.
    self.assertEqual(load_ts[0].get('timestamp', None), Timestamp('2017-07-17T01:57:29.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 1150)
    self.assertEqual(load_ts[1].get('timestamp', None), Timestamp('2017-07-17T02:00:00.000Z'))
    self.assertEqual(load_ts[1].get('load_MW', None), 1160)
    self.assertEqual(load_ts[2].get('timestamp', None), Timestamp('2017-07-17T03:00:00.000Z'))
    self.assertEqual(load_ts[2].get('load_MW', None), 1129)
    self.assertEqual(load_ts[3].get('timestamp', None), Timestamp('2017-07-17T04:00:00.000Z'))
    self.assertEqual(load_ts[3].get('load_MW', None), 1089)
def test_get_load_forecast_dst(self, expected_requests):
    """Forecast during DST returns four hourly rows at the expected UTC times."""
    self.nbpower_client = client_factory('NBP')
    forecast_url = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/2017-03-13%2000.csv'
    expected_requests.get(forecast_url,
                          content=read_fixture('nbpower', '2017-03-13 00.csv').encode('utf8'))
    load_ts = self.nbpower_client.get_load(start_at=parse('2017-03-13T00:00:00-03:00'),
                                           end_at=parse('2017-03-13T03:00:00-03:00'))
    self.assertEqual(len(load_ts), 4)  # 4 hours of load forecasts
    self.assertEqual(load_ts[0].get('timestamp', None), Timestamp('2017-03-13T03:00:00.000Z'))
    self.assertEqual(load_ts[3].get('timestamp', None), Timestamp('2017-03-13T06:00:00.000Z'))
def test_get_generation_dst_end(self, mock_request):
    """Generation on the fall-back day: 25 local hours, with a gap where 'Hour 1' repeats."""
    expected_url = 'http://content.caiso.com/green/renewrpt/20171105_DailyRenewablesWatch.txt'
    expected_response = read_fixture(self.c.__module__,
                                     '20171105_DailyRenewablesWatch.txt').encode('utf-8')
    mock_request.get(expected_url, content=expected_response)
    start_at = parse('2017-11-05T00:00:00-07:00')
    end_at = parse('2017-11-05T23:59:59-08:00')
    generation = self.c.get_generation(start_at=start_at, end_at=end_at,
                                       # FIXME: Non-base kwargs are required to route to _generation_historical()
                                       market=self.c.MARKET_CHOICES.hourly,
                                       freq=self.c.FREQUENCY_CHOICES.hourly)
    self.assertEqual(generation[0]['timestamp'], Timestamp('2017-11-05T07:00:00Z'))  # '2017-11-05T00:00:00-07:00'
    self.assertEqual(generation[9]['timestamp'], Timestamp('2017-11-05T08:00:00Z'))  # '2017-11-05T01:00:00-07:00'
    # "Hour 1" appears only once in 25 hour day. The 01:00-08:00 hour is skipped, leaving a gap in UTC timeseries.
    # BUG FIX: expected value was written 'T010:00:00Z' (three-digit hour), which is not
    # a valid ISO 8601 time; per the comment it must be 10:00 UTC.
    self.assertEqual(generation[18]['timestamp'], Timestamp('2017-11-05T10:00:00Z'))  # '2017-11-05T02:00:00-08:00'
    self.assertEqual(generation[239]['timestamp'], Timestamp('2017-11-06T07:00:00Z'))  # '2017-11-05T23:00:00-08:00'
def test_get_generation_success(self, mock_request):
    """Latest generation parses into one entry per fuel, all sharing the report timestamp."""
    fixture_body = read_fixture(self.c.NAME, 'chart-values.json').encode('utf8')
    mock_request.get('http://www.gov.pe.ca/windenergy/chart-values.php', content=fixture_body)

    load_ts = self.c.get_generation(latest=True)

    expected_timestamp = Timestamp('2017-09-25T10:01:01.000Z')
    self.assertEqual(len(load_ts), 3)
    for idx, (fuel, mw) in enumerate([('oil', 0), ('other', 146.01), ('wind', 4.55)]):
        self.assertEqual(load_ts[idx].get('timestamp', None), expected_timestamp)
        self.assertEqual(load_ts[idx].get('fuel_name', None), fuel)
        self.assertEqual(load_ts[idx].get('gen_MW', None), mw)
def test_get_load_forecast_dst_start(self, expected_requests):
    """On the spring-forward day a duplicate local hour collapses to a single entry."""
    self.nbpower_client = client_factory('NBP')
    forecast_url = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/2017-03-12%2000.csv'
    forecast_csv = read_fixture('nbpower', '2017-03-12 00.csv').encode('utf8')
    expected_requests.get(forecast_url, content=forecast_csv)

    load_ts = self.nbpower_client.get_load(
        start_at=parse('2017-03-12T00:00:00-04:00'),
        end_at=parse('2017-03-12T04:00:00-03:00'))

    self.assertEqual(len(load_ts), 3)  # 4 forecasts - 1 for quirky duplicate 2017-03-12T03:00-04:00 overwrite
    self.assertEqual(load_ts[0].get('timestamp', None), Timestamp('2017-03-12T04:00:00.000Z'))
    self.assertEqual(load_ts[0].get('load_MW', None), 2020)
    self.assertEqual(load_ts[1].get('timestamp', None), Timestamp('2017-03-12T05:00:00.000Z'))
    self.assertEqual(load_ts[1].get('load_MW', None), 1982)
    self.assertEqual(load_ts[2].get('timestamp', None), Timestamp('2017-03-12T06:00:00.000Z'))
    self.assertEqual(load_ts[2].get('load_MW', None), 1974)  # second T03:00-03:00 load value overwrites first
def test_get_generation_valid_date_range_returns_expected(self, mocked_request):
    """A 12-hour history query yields one entry per fuel per hour."""
    end_at = self.tzaware_utcnow
    start_at = end_at - timedelta(hours=12)
    fixture_body = read_fixture(self.c.__module__, 'currentmix.json')
    mocked_request.get('http://www.nspower.ca/system_report/today/currentmix.json',
                       content=fixture_body.encode('utf-8'))

    results = self.c.get_generation(start_at=start_at, end_at=end_at)

    self.assertEqual(len(results), 96)  # 8 fuels * 12 hours
    # Spot check values at the start and end of the results
    first, last = results[0], results[95]
    self.assertEqual(first['timestamp'], Timestamp('2017-10-05T00:00:00.000Z'))
    self.assertEqual(first['fuel_name'], 'biomass')
    self.assertAlmostEqual(first['gen_MW'], 1.95)
    self.assertEqual(last['timestamp'], Timestamp('2017-10-05T11:00:00.000Z'))
    self.assertEqual(last['fuel_name'], 'wind')
    self.assertAlmostEqual(last['gen_MW'], 35.52)
def test_get_load_valid_date_range_standard_time_returns_expected(self, mocked_request):
    """Hourly load during standard time parses with one entry per hour."""
    frozen_client = client_factory('YUKON')
    frozen_utcnow = datetime.utcnow().replace(tzinfo=pytz.utc)
    fixture_body = read_fixture(frozen_client.__module__, 'hourly_2017-11-11.html')
    mocked_request.get('http://www.yukonenergy.ca/consumption/chart.php?chart=hourly',
                       content=fixture_body.encode('utf-8'))

    results = frozen_client.get_load(start_at=frozen_utcnow - timedelta(hours=12),
                                     end_at=frozen_utcnow)

    self.assertEqual(len(results), 12)
    # Spot check values at the start and end of the results
    self.assertEqual(results[0]['timestamp'], Timestamp('2017-11-11T01:00:00Z'))
    self.assertAlmostEqual(results[0]['load_MW'], 70.15)
    self.assertEqual(results[11]['timestamp'], Timestamp('2017-11-11T12:00:00Z'))
    self.assertAlmostEqual(results[11]['load_MW'], 51.73)
def test_get_generation_valid_date_range_during_dst_returns_expected(self, mocked_request):
    """Hourly generation during DST parses per fuel, tolerating a missing hour in the feed."""
    frozen_client = client_factory('YUKON')
    frozen_utcnow = datetime.utcnow().replace(tzinfo=pytz.utc)
    fixture_body = read_fixture(frozen_client.__module__, 'hourly_2017-10-11.html')
    mocked_request.get('http://www.yukonenergy.ca/consumption/chart.php?chart=hourly',
                       content=fixture_body.encode('utf-8'))

    results = frozen_client.get_generation(start_at=frozen_utcnow - timedelta(hours=12),
                                           end_at=frozen_utcnow)

    self.assertEqual(len(results), 22)  # 2 fuels * 11 hours (1 hour is missing in this particular response)
    # Spot check values at the start and end of the results
    first, last = results[0], results[21]
    self.assertEqual(first['timestamp'], Timestamp('2017-10-10T23:00:00Z'))
    self.assertEqual(first['fuel_name'], 'hydro')
    self.assertAlmostEqual(first['gen_MW'], 51.36)
    self.assertEqual(last['timestamp'], Timestamp('2017-10-11T09:00:00Z'))
    self.assertEqual(last['fuel_name'], 'thermal')
    self.assertAlmostEqual(last['gen_MW'], 0)
def test_when_detector_unavailable(self):
    """Smoke test: detecting on a corrupt image completes without raising.

    Intentionally has no assertion; a raised exception is the only failure mode.
    """
    processor = ImageProcessor()
    processor.detect('feat', read_fixture('broken.jpg'))
def test_when_not_animated_gif(self):
    """A static GIF containing a face yields a non-empty face detection result."""
    result = ImageProcessor().detect('face', read_fixture('one_face.gif'))
    expect(result).Not.to_be_empty()
def test_when_animated_gif(self):
    """An animated GIF yields no detections (detection is skipped for animations)."""
    result = ImageProcessor().detect('all', read_fixture('animated.gif'))
    expect(result).to_be_empty()
def test_feature_detection(self):
    """Feature detection on a corrupt JPEG still produces a non-empty result."""
    result = ImageProcessor().detect('feature', read_fixture('broken.jpg'))
    expect(result).Not.to_be_empty()
def test_when_image_is_huge(self):
    """Detection on a very large image still produces a non-empty result."""
    result = ImageProcessor().detect('all', read_fixture('huge_image.jpg'))
    expect(result).Not.to_be_empty()