def test_redis_caching():
    # Note: you need to install Redis in order for this to work!

    # read CSV from disk, and make sure to parse dates
    df = pd.read_csv("S&P500.csv", parse_dates=['Date'], index_col=['Date'])
    df.index = pd.to_datetime(df.index)

    from findatapy.market.ioengine import IOEngine

    io = IOEngine()

    use_cache_compression = [True, False]

    for u in use_cache_compression:
        # Write DataFrame to Redis (using pyarrow format)
        io.write_time_series_cache_to_disk(
            'test_key', df, engine='redis', db_server=redis_server,
            db_port=redis_port, use_cache_compression=u)

        # Read back DataFrame from Redis (using pyarrow format)
        df_out = io.read_time_series_cache_from_disk(
            'test_key', engine='redis', db_server=redis_server,
            db_port=redis_port)

        pd.testing.assert_frame_equal(df, df_out)
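# The test above refers to module-level redis_server and redis_port values
# defined elsewhere in the test file. A minimal sketch of how they might be
# set, assuming a Redis instance running locally on the default port (both
# values are assumptions, not taken from the original file):
redis_server = "127.0.0.1"
redis_port = 6379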
def test_path_join():
    io = IOEngine()

    path = io.path_join("/home/hello", "hello", "hello")
    assert path == "/home/hello/hello/hello"

    path = io.path_join("s3://home/hello", "hello", "hello")
    assert path == "s3://home/hello/hello/hello"
def __init__(self):
    self.config = ConfigManager()
    self.logger = LoggerManager().getLogger(__name__)
    self.filter = Filter()
    self.calculations = Calculations()
    self.io_engine = IOEngine()

    self._intraday_code = -1

    return
def __init__(self):
    self.config = ConfigManager().get_instance()
    self.filter = Filter()
    self.calculations = Calculations()
    self.io_engine = IOEngine()

    self._intraday_code = -1
    self.days_expired_intraday_contract_download = -1

    return
def __init__(self, data_vendor_dict={}):
    self._config = ConfigManager().get_instance()
    self._filter = Filter()
    self._calculations = Calculations()
    self._io_engine = IOEngine()

    self._intraday_code = -1
    self._days_expired_intraday_contract_download = -1

    self._data_vendor_dict = data_vendor_dict

    return
def __init__(self, log_every_day=1):
    self.config = ConfigManager().get_instance()
    self.logger = LoggerManager().getLogger(__name__)
    self.filter = Filter()
    self.calculations = Calculations()
    self.io_engine = IOEngine()

    self._intraday_code = -1
    self.days_expired_intraday_contract_download = -1

    self.log_every_day = log_every_day

    return
freq="daily", quandl_api_key=quandl_api_key ) market = Market(market_data_generator=MarketDataGenerator()) df = market.fetch_market(md_request=md_request) print(df) folder = "../tests/" # Save to disk in a file name format friendly for reading later via # MarketDataRequest (ie. ../tests/backtest.fx.daily.quandl.NYC.parquet) IOEngine().write_time_series_cache_to_disk(folder, df, engine="parquet", md_request=md_request) md_request.data_engine = "../tests/*.parquet" df = market.fetch_market(md_request) print(df) if run_example == 4: # In this case we are saving predefined tick data tickers to disk, and # then reading back using the MarketDataRequest interface from findatapy.util.dataconstants import DataConstants from findatapy.market.ioengine import IOEngine md_request = MarketDataRequest(
        fields=['bid', 'ask', 'bidv', 'askv'],
    )

    market = Market()

    df = market.fetch_market(md_request=md_request)
    print(df)

    folder = 's3://type_your_s3_bucket_here'

    # Save to disk in a format friendly for reading later
    # (ie. s3://bla_bla_bla/backtest.fx.tick.dukascopy.NYC.EURUSD.parquet)
    # Here it will automatically generate the filename from the folder we gave
    # and the MarketDataRequest we made (alternatively, we could have just
    # given the filename directly)
    IOEngine().write_time_series_cache_to_disk(
        folder, df, engine='parquet', md_request=md_request)

    md_request.data_engine = folder + '/*.parquet'

    df = market.fetch_market(md_request)
    print(df)

    # Or we could have just read it directly using
    df = IOEngine().read_time_series_cache_from_disk(
        folder, engine='parquet', md_request=md_request)

    # We can try this using daily data
        category='fx',
        data_source='quandl',
        freq='daily',
        quandl_api_key=quandl_api_key
    )

    market = Market(market_data_generator=MarketDataGenerator())

    df = market.fetch_market(md_request=md_request)
    print(df)

    folder = '../tests/'

    # Save to disk in a file name format friendly for reading later via
    # MarketDataRequest (ie. ../tests/backtest.fx.daily.quandl.NYC.parquet)
    IOEngine().write_time_series_cache_to_disk(
        folder, df, engine='parquet', md_request=md_request)

    md_request.data_engine = '../tests/*.parquet'

    df = market.fetch_market(md_request)
    print(df)

if run_example == 4:
    # In this case we are saving predefined tick data tickers to disk, and
    # then reading back using the MarketDataRequest interface
    from findatapy.util.dataconstants import DataConstants
    from findatapy.market.ioengine import IOEngine

    md_request = MarketDataRequest(
        start_date='01 Jan 2021',
        finish_date='05 Jan 2021',
tickers=["EURUSD"], fields=["bid", "ask", "bidv", "askv"], ) market = Market() df = market.fetch_market(md_request=md_request) print(df) # Save to disk in a format friendly for reading later # (ie. s3://bla_bla_bla/backtest.fx.tick.dukascopy.NYC.EURUSD.parquet) # Here it will automatically generate the filename from the folder we gave # and the MarketDataRequest we made (altenatively, we could have just given # the filename directly) IOEngine().write_time_series_cache_to_disk(folder, df, engine="parquet", md_request=md_request) md_request.data_engine = folder + "/*.parquet" df = market.fetch_market(md_request) print(df) # Or we could have just read it directly using df = IOEngine().read_time_series_cache_from_disk(folder, df, engine="parquet", md_request=md_request) # We can try this using daily data import os