Example #1
import pandas as pd
import numpy as np
from pandas.testing import assert_frame_equal


def test_cache_handle():
    """Tests the storing of DataFrames in the CacheHandle
    """
    from tcapy.data.volatilecache import VolatileRedis as VolatileCache
    volatile_cache = VolatileCache()

    dt = pd.date_range(start='01 Jan 2017', end='05 Jan 2019', freq='1m')
    df = pd.DataFrame(index=dt, columns=['bid', 'mid', 'ask'])

    df['mid'] = np.ones(len(dt))
    ch = volatile_cache.put_dataframe_handle(df, use_cache_handles=True)

    df_1 = volatile_cache.get_dataframe_handle(ch, burn_after_reading=True)

    assert_frame_equal(df, df_1)
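
The round trip above is conceptually just serialising a DataFrame into Redis and reading it back. A minimal sketch of that underlying idea, assuming a local Redis server and the redis-py client; this is illustrative only, not tcapy's VolatileCache implementation, which adds chunking and cache-handle bookkeeping on top:

import pickle

import pandas as pd
import redis

# Pickle a small DataFrame, store it under a key in Redis, then read it back
r = redis.Redis(host='localhost', port=6379)

df = pd.DataFrame({'mid': [1.0, 1.1, 1.2]})
r.set('eurusd_df', pickle.dumps(df))

df_back = pickle.loads(r.get('eurusd_df'))
pd.testing.assert_frame_equal(df, df_back)
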
Example #2
    def get_volatile_cache(volatile_cache_engine=constants.volatile_cache_engine):
        # Lazily create the requested volatile cache engine (Redis or Plasma) once,
        # then reuse the shared instance on subsequent calls
        if volatile_cache_engine not in Mediator._volatile_cache:
            with Mediator._volatile_cache_lock:
                if volatile_cache_engine == 'redis':
                    from tcapy.data.volatilecache import VolatileRedis
                    Mediator._volatile_cache[volatile_cache_engine] = VolatileRedis()

                elif volatile_cache_engine == 'plasma':
                    from tcapy.data.volatilecache import VolatilePlasma
                    Mediator._volatile_cache[volatile_cache_engine] = VolatilePlasma()

        return Mediator._volatile_cache[volatile_cache_engine]
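
Callers normally obtain the cache through the Mediator rather than constructing it directly, so every component in a process shares the same lazily created engine instance. A short usage sketch, assuming Mediator is importable from tcapy.util.mediator and exposes get_volatile_cache as a static method:

from tcapy.util.mediator import Mediator

# Repeated calls with the same engine name return the same cached instance
volatile_cache = Mediator.get_volatile_cache(volatile_cache_engine='redis')
same_cache = Mediator.get_volatile_cache(volatile_cache_engine='redis')

assert volatile_cache is same_cache
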
Example #3
import pandas as pd
import numpy as np
from pandas.testing import assert_frame_equal


def test_data_frame_holder():
    """Tests the storing of DataFrameHolder object which is like an enhanced dict specifically for storing DataFrames,
    alongside using the VolatileCache
    """
    from tcapy.analysis.dataframeholder import DataFrameHolder
    from tcapy.data.volatilecache import VolatileRedis as VolatileCache
    from tcapy.util.timeseries import TimeSeriesOps  # assumed import path for TimeSeriesOps

    volatile_cache = VolatileCache()

    # Create a very large DataFrame, which needs to be chunked in storage
    dt = pd.date_range(start='01 Jan 2000', end='05 Jan 2020', freq='10s')
    df = pd.DataFrame(index=dt, columns=['bid', 'mid', 'ask'])

    df['bid'] = np.ones(len(dt))
    df['mid'] = np.ones(len(dt))
    df['ask'] = np.ones(len(dt))

    df_list = TimeSeriesOps().split_array_chunks(df, chunks=2)
    df_lower = df_list[0]
    df_higher = df_list[1]

    # Store both halves under the same key, once for each key suffix variant
    for i in ['_comp', '']:
        df_holder = DataFrameHolder()

        df_holder.add_dataframe(
            volatile_cache.put_dataframe_handle(df_lower,
                                                use_cache_handles=True),
            'EURUSD_df' + i)
        df_holder.add_dataframe(
            volatile_cache.put_dataframe_handle(df_higher,
                                                use_cache_handles=True),
            'EURUSD_df' + i)

        df_dict = df_holder.get_combined_dataframe_dict()

        df_final = df_dict['EURUSD_df' + i]

        # The chunks stored under the same key should be recombined into the original DataFrame
        assert_frame_equal(df, df_final)
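
# Aside (illustrative, not part of the original test): chunking is needed because a
# single Redis string value is capped at 512 MB, so very large DataFrames are split
# into row-wise pieces before storage. A rough stand-in for the splitting step,
# using numpy.array_split instead of tcapy's TimeSeriesOps:
def split_dataframe_rowwise(df, chunks):
    """Split a DataFrame into roughly equal row-wise chunks (sketch only)."""
    return np.array_split(df, chunks)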

if __name__ == '__main__':

    # Needed on Windows machines, to ensure multiprocessing works properly
    from tcapy.util.swim import Swim

    Swim()

    from tcapy.data.volatilecache import VolatileRedis

    import datetime
    from datetime import timedelta

    # First clear the Redis cache
    volatile = VolatileRedis()
    volatile.clear_cache()

    from tcapy.analysis.tcaengine import TCARequest, TCAEngineImpl

    tca_engine = TCAEngineImpl()

    # Do a large TCA computation for all currency pairs over the past year;
    # this caches all the data in Redis so it can be reused by later requests
    finish_date = datetime.datetime.utcnow().date() - timedelta(days=1)
    start_date = finish_date - timedelta(days=252)

    tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker='All')
    tca_engine.calculate_tca(tca_request)
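
    # Illustrative follow-up (an assumption, not from the original script): once the
    # year of data is cached in Redis, smaller requests, eg. a single ticker over the
    # most recent month, should be served largely from the cache
    tca_request_eurusd = TCARequest(start_date=finish_date - timedelta(days=30),
                                    finish_date=finish_date, ticker='EURUSD')

    tca_engine.calculate_tca(tca_request_eurusd)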