# Beispiel #1 (Example #1) — scraped snippet marker, commented out so the file can parse
    def update(timestamp, cu, ratios, rolling_pos, rolling_dev, channel_pos,
               channel_dev, correlation):
        """Append the newest candle to every indicator frame and csv file.

        Pulls the latest currency/ratio values, recomputes the rolling,
        correlation and channel indicators on a recent tail of the data,
        appends the rows matching *timestamp* to the accumulated frames,
        appends those same rows to the csv files on disk, and returns the
        updated frames in a dict.

        Relies on module-level names: currencies, granularity, windows,
        path, get_universe_singular, get_rolling, get_correlation,
        get_channels.
        """
        # Update currency and ratios with the newest candle
        a, b = get_universe_singular(currencies, granularity)
        cu.loc[timestamp] = a
        ratios.loc[timestamp] = b

        # windows[-1] + 10 rows of history is enough for the widest window;
        # compute the tail once instead of three times.
        tail = cu.tail(windows[-1] + 10)

        # Rolling indicators.  DataFrame.append was removed in pandas 2.0;
        # use pd.concat instead (same result with ignore_index=True).
        r_p, r_d = get_rolling(tail, currencies, windows)
        rolling_pos = pd.concat([rolling_pos,
                                 r_p.loc[r_p.timestamp == timestamp]],
                                ignore_index=True)
        rolling_dev = pd.concat([rolling_dev,
                                 r_d.loc[r_d.timestamp == timestamp]],
                                ignore_index=True)

        # Correlation based on new currencies
        c = get_correlation(tail, currencies, windows)
        correlation = pd.concat([correlation,
                                 c.loc[c.timestamp == timestamp]],
                                ignore_index=True)

        # Channels based on new currencies
        ch_p, ch_d = get_channels(tail, currencies, windows)
        channel_pos = pd.concat([channel_pos,
                                 ch_p.loc[ch_p.timestamp == timestamp]],
                                ignore_index=True)
        channel_dev = pd.concat([channel_dev,
                                 ch_d.loc[ch_d.timestamp == timestamp]],
                                ignore_index=True)

        # Append the new rows to the csv files.
        # NOTE(review): cu and ratios are both appended to the SAME
        # hard-coded 'test.csv' — looks unintentional; confirm targets.
        pd.DataFrame(cu.loc[pd.to_datetime(timestamp)]).T\
            .to_csv('/Users/user/Desktop/test.csv',
                    mode='a', header=False, index=True)
        pd.DataFrame(ratios.loc[pd.to_datetime(timestamp)]).T\
            .to_csv('/Users/user/Desktop/test.csv',
                    mode='a', header=False, index=True)
        # Indicator frames all follow the same filter-then-append pattern.
        for frame, fname in ((rolling_pos, 'rolling_pos.csv'),
                             (rolling_dev, 'rolling_dev.csv'),
                             (channel_pos, 'channel_pos.csv'),
                             (channel_dev, 'channel_dev.csv'),
                             (correlation, 'correlation.csv')):
            frame.loc[frame.timestamp == timestamp]\
                .to_csv(path + fname, index=False, header=False, mode='a')

        return {
            'cu': cu,
            'ratios': ratios,
            'rolling_pos': rolling_pos,
            'rolling_dev': rolling_dev,
            'correlation': correlation,
            'channel_pos': channel_pos,
            'channel_dev': channel_dev,
        }
    def update(timestamp):
        """Refresh every indicator frame with the latest candle and export.

        Mutates the module-level frames (cu, ratios, rolling, correlation,
        channels) in place via ``.loc`` and rewrites their csv files under
        ``path``.
        """
        # Newest currency / ratio values for this candle
        curr_row, ratio_row = get_universe_singular(currencies, granularity)
        cu.loc[timestamp] = curr_row
        ratios.loc[timestamp] = ratio_row

        # Only the most recent rows are needed to recompute the indicators
        recent = cu.tail(windows[-1] + 10)

        # Rolling indicators based on the updated currencies
        roll = get_rolling(recent, currencies, windows)
        rolling.loc[timestamp] = roll.loc[roll.last_valid_index()]

        # Correlation based on the updated currencies
        corr = get_correlation(recent, currencies, windows)
        correlation.loc[timestamp] = corr.loc[corr.last_valid_index()]

        # Channels based on the updated currencies
        chan = get_channels(recent, currencies, windows)
        channels.loc[timestamp] = chan.loc[chan.last_valid_index()]

        # Export each frame to its csv file
        for frame, fname in ((cu, 'currencies.csv'),
                             (ratios, 'ratio.csv'),
                             (correlation, 'correlation.csv'),
                             (rolling, 'rolling.csv'),
                             (channels, 'channels.csv')):
            frame.to_csv(path + fname)
# Beispiel #3 (Example #3) — scraped snippet marker, commented out so the file can parse
###############################################################################
if 1:

    cu.to_pickle(file)
    timestamp = cu.loc[cu.last_valid_index(), 'timestamp']
    # Look for new candle every _x_ seconds.  Update all info when found.
    while True:
        # Do we have new information ?
        new_time = get_time(granularity)
        if new_time > timestamp:
            # FIX: the original `try` had no matching except clause
            # (SyntaxError).  Fail soft so the polling loop keeps running.
            try:
                sleep(2)  # give the feed a moment to finalize the candle
                timestamp = new_time
                print('Candle Found at:\t' + str(timestamp))
                # Update ratios and cu with new data
                a, b = get_universe_singular(currencies, granularity)
                # Add line to cu
                a['timestamp'] = timestamp
                # DataFrame.append was removed in pandas 2.0; build a
                # one-row frame from the dict and concat instead.
                cu = pd.concat([cu, pd.DataFrame([a])], ignore_index=True)
                # Export currencies
                cu.to_pickle(file)
                # Print Data
                print()
                for k, v in sorted(a.items()):
                    print(v)
                print()
                # Print Spread
                spreads = get_multiple_candles_spread(instrument_list,
                                                      granularity)
                for k, v in sorted(spreads.items()):
                    print(v)
            except Exception as e:
                # Best-effort loop: report the failure and keep polling.
                print('Update failed:\t' + str(e))
###############################################################################
if 1:

    timestamp = get_time(granularities[0])

    # Look for new candle every _x_ seconds.  Update all info when found.
    while True:

        # Do we have new information ?
        new_time = get_time(update_granularity)
        if new_time > timestamp:

            timestamp = new_time

            # Update ratios and cu with new data
            a, b = get_universe_singular(currencies, update_granularity)
            a['timestamp'] = timestamp
            b['timestamp'] = timestamp

            # Currencies: load, append the new row, persist.
            # DataFrame.append was removed in pandas 2.0; concat a one-row
            # frame built from the dict instead.
            cu_pkl = path + str(update_granularity) + '.pkl'
            cu = pd.read_pickle(cu_pkl)
            cu = pd.concat([cu, pd.DataFrame([a])], ignore_index=True)
            cu.to_pickle(cu_pkl)

            # Ratios: same load / append / persist cycle.
            rat_pkl = ratios_path + str(update_granularity) + '.pkl'
            rat = pd.read_pickle(rat_pkl)
            rat = pd.concat([rat, pd.DataFrame([b])], ignore_index=True)
            rat.to_pickle(rat_pkl)

            # Print update
    def update(timestamp, rolling_pos, rolling_dev, channel_pos,
               channel_dev, correlation):
        """Append the newest candle's indicator rows and rewrite the csvs.

        Updates the module-level cu / ratios frames in place, recomputes
        rolling, correlation and channel indicators on a recent tail of
        the data, appends the rows matching *timestamp* to the indicator
        frames, exports everything to csv under ``path``, and returns the
        grown indicator frames.

        Relies on module-level names: cu, ratios, currencies, granularity,
        windows, path, get_universe_singular, get_rolling,
        get_correlation, get_channels.
        """
        # Update currency and ratios with the newest candle
        a, b = get_universe_singular(currencies, granularity)
        cu.loc[timestamp] = a
        ratios.loc[timestamp] = b

        # windows[-1] + 10 rows of history is enough for the widest window
        tail = cu.tail(windows[-1] + 10)

        # Rolling indicators.  DataFrame.append was removed in pandas 2.0;
        # use pd.concat instead (same result with ignore_index=True).
        r_p, r_d = get_rolling(tail, currencies, windows)
        rolling_pos = pd.concat([rolling_pos,
                                 r_p.loc[r_p.timestamp == timestamp]],
                                ignore_index=True)
        rolling_dev = pd.concat([rolling_dev,
                                 r_d.loc[r_d.timestamp == timestamp]],
                                ignore_index=True)

        # Correlation based on new currencies
        c = get_correlation(tail, currencies, windows)
        correlation = pd.concat([correlation,
                                 c.loc[c.timestamp == timestamp]],
                                ignore_index=True)

        # Channels based on new currencies
        ch_p, ch_d = get_channels(tail, currencies, windows)
        channel_pos = pd.concat([channel_pos,
                                 ch_p.loc[ch_p.timestamp == timestamp]],
                                ignore_index=True)
        channel_dev = pd.concat([channel_dev,
                                 ch_d.loc[ch_d.timestamp == timestamp]],
                                ignore_index=True)

        # Export cu and ratios in long (stacked) format.  Named long_cu /
        # long_ratios instead of reusing c / r, which shadowed the
        # correlation temp above.
        long_cu = cu.stack().reset_index()
        long_ratios = ratios.stack().reset_index()
        long_cu.columns = ['timestamp', 'currency', 'currency_values']
        long_ratios.columns = ['timestamp', 'instrument', 'instrument_values']
        long_cu.to_csv(path + 'currencies.csv', index=False)
        long_ratios.to_csv(path + 'ratios.csv', index=False)
        # Export indicators
        rolling_pos.to_csv(path + 'rolling_pos.csv', index=False)
        rolling_dev.to_csv(path + 'rolling_dev.csv', index=False)
        correlation.to_csv(path + 'correlation.csv', index=False)
        channel_pos.to_csv(path + 'channel_pos.csv', index=False)
        channel_dev.to_csv(path + 'channel_dev.csv', index=False)

        return {
            'rolling_pos': rolling_pos,
            'rolling_dev': rolling_dev,
            'correlation': correlation,
            'channel_pos': channel_pos,
            'channel_dev': channel_dev,
        }