def testDownloadAndParseWeekly(self):
    """Download weekly AAPL bars from Quandl, parse them and check the first and last bars of 2010."""
    with common.TmpDir() as tmpPath:
        instrument = "AAPL"
        path = os.path.join(tmpPath, "quandl-aapl-weekly-2010.csv")
        quandl.download_weekly_bars("WIKI", instrument, 2010, path, auth_token)
        bf = quandlfeed.Feed(frequency=bar.Frequency.WEEK)
        bf.addBarsFromCSV(instrument, path)
        bf.loadAll()
        # Quandl used to report 2010-1-3 as the first week of 2010.
        self.assertIn(bf[instrument][0].getDateTime(), [
            datetime.datetime(2010, 1, 3),
            datetime.datetime(2010, 1, 10)
        ])
        # assertEquals/assertNotEquals are deprecated aliases (removed in Python 3.12).
        self.assertEqual(bf[instrument][-1].getDateTime(), datetime.datetime(2010, 12, 26))
        self.assertEqual(bf[instrument][-1].getOpen(), 325.0)
        self.assertEqual(bf[instrument][-1].getHigh(), 325.15)
        self.assertEqual(bf[instrument][-1].getLow(), 323.17)
        self.assertEqual(bf[instrument][-1].getClose(), 323.6)
        self.assertEqual(bf[instrument][-1].getVolume(), 7969900)
        self.assertEqual(bf[instrument][-1].getPrice(), 323.6)
        # Not checking against a specific value since this is going to change
        # as time passes by.
        self.assertIsNotNone(bf[instrument][-1].getAdjClose())
def testResampleNinjaTraderDay(self):
    """Resample minute bars into daily bars, dump them to a CSV file and load them back."""
    with common.TmpDir() as tmpDir:
        # Resample the minute bars into daily bars.
        minuteFeed = ninjatraderfeed.Feed(ninjatraderfeed.Frequency.MINUTE)
        minuteFeed.addBarsFromCSV("spy", common.get_data_file_path("nt-spy-minute-2011.csv"))
        resampledDS = resampled_ds.ResampledBarDataSeries(minuteFeed["spy"], bar.Frequency.DAY)
        resampledPath = os.path.join(tmpDir, "day-nt-spy-minute-2011.csv")
        resample.resample_to_csv(minuteFeed, bar.Frequency.DAY, resampledPath)
        # Time didn't change after the last bar, so the last slot has to be pushed manually.
        resampledDS.pushLast()

        # Load the resampled file.
        dailyFeed = csvfeed.GenericBarFeed(bar.Frequency.DAY)
        dailyFeed.addBarsFromCSV("spy", resampledPath, marketsession.USEquities.getTimezone())
        dailyFeed.loadAll()

        self.assertEqual(len(dailyFeed["spy"]), 25)
        self.assertEqual(
            dailyFeed["spy"][0].getDateTime(),
            dt.localize(datetime.datetime(2011, 1, 3), marketsession.USEquities.getTimezone())
        )
        self.assertEqual(
            dailyFeed["spy"][-1].getDateTime(),
            dt.localize(datetime.datetime(2011, 2, 1), marketsession.USEquities.getTimezone())
        )
        self.assertEqual(len(resampledDS), len(dailyFeed["spy"]))
        self.assertEqual(resampledDS[0].getDateTime(), dt.as_utc(datetime.datetime(2011, 1, 3)))
        self.assertEqual(resampledDS[-1].getDateTime(), dt.as_utc(datetime.datetime(2011, 2, 1)))
def testBuildFeedWeekly(self):
    """Build a weekly AAPL feed from Quandl and check the first and last bars of 2010."""
    with common.TmpDir() as tmpPath:
        instrument = "AAPL"
        bf = quandl.build_feed(
            "WIKI", [instrument], 2010, 2010, tmpPath, bar.Frequency.WEEK,
            authToken=auth_token
        )
        bf.loadAll()
        # Quandl used to report 2010-1-3 as the first week of 2010.
        self.assertIn(bf[instrument][0].getDateTime(), [
            datetime.datetime(2010, 1, 3),
            datetime.datetime(2010, 1, 10)
        ])
        # assertEquals/assertNotEquals are deprecated aliases (removed in Python 3.12).
        self.assertEqual(bf[instrument][-1].getDateTime(), datetime.datetime(2010, 12, 26))
        self.assertEqual(bf[instrument][-1].getOpen(), 325.0)
        self.assertEqual(bf[instrument][-1].getHigh(), 325.15)
        self.assertEqual(bf[instrument][-1].getLow(), 323.17)
        self.assertEqual(bf[instrument][-1].getClose(), 323.6)
        self.assertEqual(bf[instrument][-1].getVolume(), 7969900)
        self.assertEqual(bf[instrument][-1].getPrice(), 323.6)
        # Not checking against a specific value since this is going to change
        # as time passes by.
        self.assertIsNotNone(bf[instrument][-1].getAdjClose())
def testBuildDailyFeed(self):
    """Build a daily ORCL feed from Yahoo! Finance and check the last bar of 2010."""
    instrument = "orcl"
    with common.TmpDir() as tmpPath:
        feed = yahoofinance.build_feed([instrument], 2010, 2010, storage=tmpPath)
        feed.loadAll()
        lastBar = feed[instrument][-1]
        self.assertEqual(lastBar.getOpen(), 31.22)
        self.assertEqual(lastBar.getClose(), 31.30)
def testInvalidInstrument(self):
    """Building a feed for an inexistent Quandl dataset fails unless skipErrors is set."""
    instrument = "inexistent"

    # Don't skip errors: the HTTP 404 should propagate.
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12).
    with self.assertRaisesRegex(Exception, "HTTP Error 404: Not Found"):
        with common.TmpDir() as tmpPath:
            quandl.build_feed(
                instrument, [instrument], 2010, 2010, tmpPath, bar.Frequency.WEEK,
                authToken=auth_token
            )

    # Skip errors: the instrument should simply be missing from the feed.
    with common.TmpDir() as tmpPath:
        bf = quandl.build_feed(
            instrument, [instrument], 2010, 2010, tmpPath, bar.Frequency.WEEK,
            authToken=auth_token, skipErrors=True
        )
        bf.loadAll()
        self.assertNotIn(instrument, bf)
def testBuildWeeklyFeed(self):
    """Build a weekly AAPL feed from Yahoo! Finance and check the last bar of 2013."""
    instrument = "aapl"
    with common.TmpDir() as tmpPath:
        feed = yahoofinance.build_feed(
            [instrument], 2013, 2013, storage=tmpPath, frequency=bar.Frequency.WEEK
        )
        feed.loadAll()
        lastBar = feed[instrument][-1]
        self.assertEqual(round(lastBar.getOpen(), 2), 557.46)
        self.assertEqual(round(lastBar.getHigh(), 2), 561.28)
        self.assertEqual(round(lastBar.getLow(), 2), 540.43)
        self.assertEqual(round(lastBar.getClose(), 2), 540.98)
        self.assertTrue(lastBar.getVolume() in (9852500, 9855900, 68991600))
def testDownloadAndParseDaily(self):
    """Download daily ORCL bars from Google Finance and check the last bar of 2010."""
    instrument = "orcl"
    with common.TmpDir() as tmpDir:
        csvPath = os.path.join(tmpDir, "orcl-2010.csv")
        googlefinance.download_daily_bars(instrument, 2010, csvPath)
        feed = googlefeed.Feed()
        feed.addBarsFromCSV(instrument, csvPath)
        feed.loadAll()
        lastBar = feed[instrument][-1]
        self.assertEqual(lastBar.getOpen(), 31.22)
        self.assertEqual(lastBar.getClose(), 31.30)
def testMapColumnNames(self):
    """Build a Quandl feed mapping the 'Adjusted Close' column to adj_close."""
    with common.TmpDir() as tmpPath:
        bf = quandl.build_feed(
            "YAHOO", ["AAPL"], 2010, 2010, tmpPath,
            columnNames={"adj_close": "Adjusted Close"}
        )
        bf.setUseAdjustedValues(True)
        bf.loadAll()
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(bf["AAPL"][-1].getClose(), 322.560013)
        self.assertIsNotNone(bf["AAPL"][-1].getAdjClose())
        self.assertIsNotNone(bf["AAPL"][-1].getPrice())
def testInvalidInstrument(self):
    """Building a feed for an inexistent instrument fails unless skipErrors is set."""
    instrument = "inexistent"

    # Don't skip errors: the HTTP error should propagate.
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12).
    with self.assertRaisesRegex(Exception, "HTTP Error 400: Bad Request"):
        with common.TmpDir() as tmpPath:
            # No point in binding the return value; the call is expected to raise.
            googlefinance.build_feed(
                [instrument], 2100, 2101, storage=tmpPath, frequency=bar.Frequency.DAY
            )

    # Skip errors: the instrument should simply be missing from the feed.
    with common.TmpDir() as tmpPath:
        bf = googlefinance.build_feed(
            [instrument], 2100, 2101, storage=tmpPath, frequency=bar.Frequency.DAY,
            skipErrors=True
        )
        bf.loadAll()
        self.assertNotIn(instrument, bf)
def testExtraColumns(self):
    """Check that columns not mapped to standard bar fields are exposed as extra columns."""
    with common.TmpDir() as tmpPath:
        columnNames = {"open": "Last", "close": "Last"}
        bf = quandl.build_feed("BITSTAMP", ["USD"], 2014, 2014, tmpPath, columnNames=columnNames)
        bf.loadAll()
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(bf["USD"][-1].getExtraColumns()["Bid"], 319.19)
        self.assertEqual(bf["USD"][-1].getExtraColumns()["Ask"], 319.63)
        bids = bf["USD"].getExtraDataSeries("Bid")
        self.assertEqual(bids[-1], 319.19)
def testInvalidDates(self):
    """Building a feed for a date range with no data fails unless skipErrors is set."""
    instrument = "orcl"

    # Don't skip errors: the HTTP 404 should propagate.
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12).
    with self.assertRaisesRegex(Exception, "HTTP Error 404: Not Found"):
        with common.TmpDir() as tmpPath:
            # No point in binding the return value; the call is expected to raise.
            yahoofinance.build_feed(
                [instrument], 2100, 2101, storage=tmpPath, frequency=bar.Frequency.DAY
            )

    # Skip errors: the instrument should simply be missing from the feed.
    with common.TmpDir() as tmpPath:
        bf = yahoofinance.build_feed(
            [instrument], 2100, 2101, storage=tmpPath, frequency=bar.Frequency.DAY,
            skipErrors=True
        )
        bf.loadAll()
        self.assertNotIn(instrument, bf)
def testBuildFeedDaily(self):
    """Build a daily ORCL feed from Quandl's WIKI database and check the last bar of 2010."""
    with common.TmpDir() as tmpPath:
        instrument = "ORCL"
        bf = quandl.build_feed("WIKI", [instrument], 2010, 2010, tmpPath, authToken=auth_token)
        bf.loadAll()
        # assertEquals/assertNotEquals are deprecated aliases (removed in Python 3.12).
        lastBar = bf[instrument][-1]
        self.assertEqual(lastBar.getDateTime(), datetime.datetime(2010, 12, 31))
        self.assertEqual(lastBar.getOpen(), 31.22)
        self.assertEqual(lastBar.getHigh(), 31.33)
        self.assertEqual(lastBar.getLow(), 30.93)
        self.assertEqual(lastBar.getClose(), 31.3)
        self.assertEqual(lastBar.getVolume(), 11716300)
        self.assertEqual(lastBar.getPrice(), 31.3)
        # Not checking against a specific value since this is going to change
        # as time passes by.
        self.assertIsNotNone(lastBar.getAdjClose())
def testDownloadAndParseWeekly(self):
    """Download weekly AAPL bars from Yahoo! Finance and check the last bar of 2013."""
    instrument = "aapl"
    with common.TmpDir() as tmpDir:
        csvPath = os.path.join(tmpDir, "aapl-weekly-2013.csv")
        yahoofinance.download_weekly_bars(instrument, 2013, csvPath)
        feed = yahoofeed.Feed(frequency=bar.Frequency.WEEK)
        feed.addBarsFromCSV(instrument, csvPath)
        feed.loadAll()
        lastBar = feed[instrument][-1]
        self.assertEqual(round(lastBar.getOpen(), 2), 557.46)
        self.assertEqual(round(lastBar.getHigh(), 2), 561.28)
        self.assertEqual(round(lastBar.getLow(), 2), 540.43)
        self.assertEqual(round(lastBar.getClose(), 2), 540.98)
        self.assertTrue(lastBar.getVolume() in (9852500, 9855900, 68991600))
def testDownloadAndParseDailyNoAdjClose(self):
    """Parse a Quandl dataset that has no adjusted close column."""
    with common.TmpDir() as tmpPath:
        instrument = "ORCL"
        path = os.path.join(tmpPath, "quandl-daily-orcl-2013.csv")
        quandl.download_daily_bars("GOOG", "NASDAQ_ORCL", 2013, path, auth_token)
        bf = quandlfeed.Feed()
        bf.setNoAdjClose()
        bf.addBarsFromCSV(instrument, path)
        bf.loadAll()
        # assertEquals is a deprecated alias (removed in Python 3.12).
        lastBar = bf[instrument][-1]
        self.assertEqual(lastBar.getDateTime(), datetime.datetime(2013, 12, 31))
        self.assertEqual(lastBar.getOpen(), 37.94)
        self.assertEqual(lastBar.getHigh(), 38.34)
        self.assertEqual(lastBar.getLow(), 37.88)
        self.assertEqual(lastBar.getClose(), 38.26)
        self.assertEqual(lastBar.getVolume(), 11747517)
        # No adjusted close was loaded, so the price falls back to the close.
        self.assertIsNone(lastBar.getAdjClose())
        self.assertEqual(lastBar.getPrice(), 38.26)
def testDownloadAndParseDaily(self):
    """Download daily ORCL bars from Quandl, parse them and check the last bar of 2010."""
    with common.TmpDir() as tmpPath:
        instrument = "ORCL"
        path = os.path.join(tmpPath, "quandl-daily-orcl-2010.csv")
        quandl.download_daily_bars("WIKI", instrument, 2010, path, auth_token)
        bf = quandlfeed.Feed()
        bf.addBarsFromCSV(instrument, path)
        bf.loadAll()
        # assertEquals/assertNotEquals are deprecated aliases (removed in Python 3.12).
        lastBar = bf[instrument][-1]
        self.assertEqual(lastBar.getDateTime(), datetime.datetime(2010, 12, 31))
        self.assertEqual(lastBar.getOpen(), 31.22)
        self.assertEqual(lastBar.getHigh(), 31.33)
        self.assertEqual(lastBar.getLow(), 30.93)
        self.assertEqual(lastBar.getClose(), 31.3)
        self.assertEqual(lastBar.getVolume(), 11716300)
        self.assertEqual(lastBar.getPrice(), 31.3)
        # Not checking against a specific value since this is going to change
        # as time passes by.
        self.assertIsNotNone(lastBar.getAdjClose())
def testBuildFeedDaily(self):
    """Build a daily ORCL feed from Quandl and check the last bar of 2010, including adj. close."""
    with common.TmpDir() as tmpPath:
        instrument = "ORCL"
        bf = quandl.build_feed("WIKI", [instrument], 2010, 2010, tmpPath, authToken=auth_token)
        bf.loadAll()
        # assertEquals is a deprecated alias (removed in Python 3.12).
        lastBar = bf[instrument][-1]
        self.assertEqual(lastBar.getDateTime(), datetime.datetime(2010, 12, 31))
        self.assertEqual(lastBar.getOpen(), 31.22)
        self.assertEqual(lastBar.getHigh(), 31.33)
        self.assertEqual(lastBar.getLow(), 30.93)
        self.assertEqual(lastBar.getClose(), 31.3)
        self.assertEqual(lastBar.getVolume(), 11716300)
        self.assertEqual(lastBar.getAdjClose(), 30.23179912467581)
        self.assertEqual(lastBar.getPrice(), 31.3)
def testDownloadAndParseDaily(self):
    """Download daily ORCL bars from Quandl and check the last bar of 2010, including adj. close."""
    with common.TmpDir() as tmpPath:
        instrument = "ORCL"
        path = os.path.join(tmpPath, "quandl-daily-orcl-2010.csv")
        quandl.download_daily_bars("WIKI", instrument, 2010, path, auth_token)
        bf = quandlfeed.Feed()
        bf.addBarsFromCSV(instrument, path)
        bf.loadAll()
        # assertEquals is a deprecated alias (removed in Python 3.12).
        lastBar = bf[instrument][-1]
        self.assertEqual(lastBar.getDateTime(), datetime.datetime(2010, 12, 31))
        self.assertEqual(lastBar.getOpen(), 31.22)
        self.assertEqual(lastBar.getHigh(), 31.33)
        self.assertEqual(lastBar.getLow(), 30.93)
        self.assertEqual(lastBar.getClose(), 31.3)
        self.assertEqual(lastBar.getVolume(), 11716300)
        self.assertEqual(lastBar.getAdjClose(), 30.23179912467581)
        self.assertEqual(lastBar.getPrice(), 31.3)
def testResampleNinjaTraderHour(self):
    """Resample minute bars into hourly bars, dump them to a CSV file and load them back."""
    with common.TmpDir() as tmpDir:
        # Resample the minute bars into hourly bars.
        minuteFeed = ninjatraderfeed.Feed(ninjatraderfeed.Frequency.MINUTE)
        minuteFeed.addBarsFromCSV("spy", common.get_data_file_path("nt-spy-minute-2011.csv"))
        resampledDS = resampled_ds.ResampledBarDataSeries(minuteFeed["spy"], bar.Frequency.HOUR)
        resampledPath = os.path.join(tmpDir, "hour-nt-spy-minute-2011.csv")
        resample.resample_to_csv(minuteFeed, bar.Frequency.HOUR, resampledPath)
        # Time didn't change after the last bar, so the last slot has to be pushed manually.
        resampledDS.pushLast()

        # Load the resampled file.
        hourFeed = csvfeed.GenericBarFeed(
            bar.Frequency.HOUR, marketsession.USEquities.getTimezone()
        )
        hourFeed.addBarsFromCSV("spy", resampledPath)
        hourFeed.loadAll()

        self.assertEqual(len(hourFeed["spy"]), 340)
        self.assertEqual(
            hourFeed["spy"][0].getDateTime(),
            dt.localize(datetime.datetime(2011, 1, 3, 9), marketsession.USEquities.getTimezone())
        )
        self.assertEqual(
            hourFeed["spy"][-1].getDateTime(),
            dt.localize(datetime.datetime(2011, 2, 1, 1), marketsession.USEquities.getTimezone())
        )
        firstBar = hourFeed["spy"][0]
        self.assertEqual(firstBar.getOpen(), 126.35)
        self.assertEqual(firstBar.getHigh(), 126.45)
        self.assertEqual(firstBar.getLow(), 126.3)
        self.assertEqual(firstBar.getClose(), 126.4)
        self.assertEqual(firstBar.getVolume(), 3397.0)
        self.assertEqual(firstBar.getAdjClose(), None)
        self.assertEqual(len(resampledDS), len(hourFeed["spy"]))
        self.assertEqual(resampledDS[0].getDateTime(), dt.as_utc(datetime.datetime(2011, 1, 3, 9)))
        self.assertEqual(resampledDS[-1].getDateTime(), dt.as_utc(datetime.datetime(2011, 2, 1, 1)))
def testDownloadAndParseDaily(self):
    """Run an SMA crossover strategy, plot it and check the generated PNG has a sane size."""
    instrument = "orcl"
    barFeed = yahoofeed.Feed()
    barFeed.addBarsFromCSV(instrument, common.get_data_file_path("orcl-2000-yahoofinance.csv"))
    strat = sma_crossover.SMACrossOver(barFeed, instrument, 20)
    strategyPlotter = plotter.StrategyPlotter(strat, True, True, True)
    strategyPlotter.getInstrumentSubplot(instrument).addDataSeries("sma", strat.getSMA())
    strat.run()

    with common.TmpDir() as tmpDir:
        figure = strategyPlotter.buildFigure()
        figure.set_size_inches(10, 8)
        pngPath = os.path.join(tmpDir, "plotter_test.png")
        figure.savefig(pngPath)
        # Check that file size looks ok:
        # 118458 on Mac, 116210 on Linux.
        self.assertGreater(os.stat(pngPath).st_size, 110000)
def testDownloadAndParseDaily_UseAdjClose(self):
    """Download daily ORCL bars from Quandl and check that adjusted values drive the price."""
    with common.TmpDir() as tmpPath:
        instrument = "ORCL"
        path = os.path.join(tmpPath, "quandl-daily-orcl-2010.csv")
        quandl.download_daily_bars("WIKI", instrument, 2010, path, auth_token)
        bf = quandlfeed.Feed()
        bf.addBarsFromCSV(instrument, path)
        # Need to setUseAdjustedValues(True) after loading the file because we
        # can't tell in advance if adjusted values are there or not.
        bf.setUseAdjustedValues(True)
        bf.loadAll()
        # assertEquals/assertNotEquals are deprecated aliases (removed in Python 3.12).
        lastBar = bf[instrument][-1]
        self.assertEqual(lastBar.getDateTime(), datetime.datetime(2010, 12, 31))
        self.assertEqual(lastBar.getOpen(), 31.22)
        self.assertEqual(lastBar.getHigh(), 31.33)
        self.assertEqual(lastBar.getLow(), 30.93)
        self.assertEqual(lastBar.getClose(), 31.3)
        self.assertEqual(lastBar.getVolume(), 11716300)
        self.assertEqual(lastBar.getPrice(), lastBar.getAdjClose())
        # Not checking against a specific value since this is going to change
        # as time passes by.
        self.assertIsNotNone(lastBar.getAdjClose())
def testDownloadAndParseDaily(self):
    """Run an SMA crossover strategy, plot it and compare the PNG against the golden file."""
    instrument = "orcl"
    barFeed = yahoofeed.Feed()
    barFeed.addBarsFromCSV(instrument, common.get_data_file_path("orcl-2000-yahoofinance.csv"))
    strat = sma_crossover.SMACrossOver(barFeed, instrument, 20)
    plt = plotter.StrategyPlotter(strat, True, True, True)
    plt.getInstrumentSubplot(instrument).addDataSeries("sma", strat.getSMA())
    strat.run()

    with common.TmpDir() as tmpPath:
        fig = plt.buildFigure()
        fig.set_size_inches(10, 8)
        png = os.path.join(tmpPath, "plotter_test.png")
        fig.savefig(png)
        # PNGs are binary data: open them in binary mode (text mode fails to
        # decode / can corrupt binary data) and close the files deterministically.
        with open(common.get_data_file_path("plotter_test.png"), "rb") as expectedFile:
            expected = expectedFile.read()
        with open(png, "rb") as actualFile:
            self.assertEqual(expected, actualFile.read())