# Excerpt from tcapy's Mediator class: lazily constructs one VolatileCache per
# tcapy version, guarded by a class-level lock. 'constants',
# 'Mediator._volatile_cache' (a dict) and 'Mediator._volatile_cache_lock' are
# defined on the enclosing class/module.
def get_volatile_cache(version=constants.tcapy_version):
    if version not in Mediator._volatile_cache:
        with Mediator._volatile_cache_lock:
            # Re-check inside the lock: another thread may have constructed
            # the cache while we were waiting to acquire it
            if version not in Mediator._volatile_cache:
                from tcapy.data.volatilecache import VolatileRedis as VolatileCache
                # from tcapy.data.volatilecache import VolatileDictionary as VolatileCache

                Mediator._volatile_cache[version] = VolatileCache()

    return Mediator._volatile_cache[version]
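# A minimal, self-contained sketch of the same keyed-singleton pattern
# (hypothetical names, not part of tcapy): the unlocked check keeps the hot
# path cheap, and the second check inside the lock stops two threads from
# both constructing an instance for the same key.
import threading

class _SingletonRegistry(object):
    _instances = {}
    _lock = threading.Lock()

    @classmethod
    def get(cls, key, factory):
        if key not in cls._instances:
            with cls._lock:
                if key not in cls._instances:
                    cls._instances[key] = factory()

        return cls._instances[key]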
import numpy as np
import pandas as pd

from pandas.testing import assert_frame_equal


def test_cache_handle():
    """Tests the storing of DataFrames in the VolatileCache via a CacheHandle
    """
    from tcapy.data.volatilecache import VolatileRedis as VolatileCache

    volatile_cache = VolatileCache()

    dt = pd.date_range(start='01 Jan 2017', end='05 Jan 2019', freq='1m')

    df = pd.DataFrame(index=dt, columns=['bid', 'mid', 'ask'])
    df['mid'] = np.ones(len(dt))

    # Store the DataFrame and get back a lightweight handle, then resolve the
    # handle (burn_after_reading removes the entry once it has been fetched)
    ch = volatile_cache.put_dataframe_handle(df, use_cache_handles=True)
    df_1 = volatile_cache.get_dataframe_handle(ch, burn_after_reading=True)

    assert_frame_equal(df, df_1)
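# A dict-backed sketch of the cache-handle round trip exercised above
# (hypothetical, not tcapy's implementation): 'put' stores the DataFrame
# under a generated key and returns that key as the handle; 'get' resolves
# the handle and, with burn_after_reading=True, pops the entry so it can
# only be read once.
import uuid

class _DictCacheSketch(object):
    def __init__(self):
        self._store = {}

    def put_dataframe_handle(self, df):
        handle = 'cache_handle_' + str(uuid.uuid4())
        self._store[handle] = df

        return handle

    def get_dataframe_handle(self, handle, burn_after_reading=False):
        if burn_after_reading:
            return self._store.pop(handle)

        return self._store[handle]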
def test_data_frame_holder():
    """Tests the DataFrameHolder object, which is like an enhanced dict
    specifically for storing DataFrames, alongside using the VolatileCache
    """
    from tcapy.analysis.dataframeholder import DataFrameHolder
    from tcapy.data.volatilecache import VolatileRedis as VolatileCache
    from tcapy.util.timeseries import TimeSeriesOps

    volatile_cache = VolatileCache()

    # Create a very large DataFrame, which needs to be chunked in storage
    dt = pd.date_range(start='01 Jan 2000', end='05 Jan 2020', freq='10s')

    df = pd.DataFrame(index=dt, columns=['bid', 'mid', 'ask'])
    df['bid'] = np.ones(len(dt))
    df['mid'] = np.ones(len(dt))
    df['ask'] = np.ones(len(dt))

    # Split into two halves, push each half into the cache separately, then
    # check that DataFrameHolder stitches them back into the original
    df_list = TimeSeriesOps().split_array_chunks(df, chunks=2)

    df_lower = df_list[0]
    df_higher = df_list[1]

    for i in ['_comp', '']:
        df_holder = DataFrameHolder()

        df_holder.add_dataframe(
            volatile_cache.put_dataframe_handle(df_lower, use_cache_handles=True), 'EURUSD_df' + i)
        df_holder.add_dataframe(
            volatile_cache.put_dataframe_handle(df_higher, use_cache_handles=True), 'EURUSD_df' + i)

        df_dict = df_holder.get_combined_dataframe_dict()
        df_final = df_dict['EURUSD_df' + i]

        assert_frame_equal(df, df_final)
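# A sketch of the chunk-and-recombine behaviour the test relies on (assumed
# semantics, written with plain numpy/pandas rather than tcapy internals):
# split a DataFrame row-wise into roughly equal chunks, then concatenate the
# chunks in index order to recover the original.
def _split_chunks_sketch(df, chunks):
    return np.array_split(df, chunks)

def _combine_chunks_sketch(df_list):
    return pd.concat(df_list).sort_index()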