                date_list[df_daily_compressed_cnt] = prev_date
                last_list[df_daily_compressed_cnt] = prev_last
                volume_list[df_daily_compressed_cnt] = prev_volume

                df_daily_compressed_cnt += 1
                prev_volume = curr_volume

            prev_date = curr_date
            prev_last = curr_last

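        # after the day's loop finishes, write out the final pending row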
        date_list[df_daily_compressed_cnt] = curr_date
        last_list[df_daily_compressed_cnt] = curr_last
        volume_list[df_daily_compressed_cnt] = curr_volume

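        # assemble the day's compressed rows into a DataFrame and append it to the running result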
        df_daily_compressed = DataFrame({'Date': date_list, 'Last': last_list, 'Volume': volume_list})

        df_compressed = df_compressed.append(df_daily_compressed)

    # increment to next day
    start_stamp_utc += Day()

    # if start date is Thursday 5PM CST jump to Sunday 5PM CST
    if start_stamp_utc.weekday() == 4:
        start_stamp_utc += 2*Day()

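# persist the compressed data and index it by Date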
futures_db.upload_dataframe_to_table(df_compressed, compressed_table_name)
futures_db.create_table_index(compressed_table_name, "Date")
futures_db.close_database_connection()


import datetime
import os

import pandas as pd

from util.futuresdatabase import FuturesDatabase


instrument_list = ['GC', 'CL', 'ZB']

futures_db = FuturesDatabase()

for instrument in instrument_list:
    table_name = instrument + '_LAST'

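    # rebuild this instrument's table from scratch before loading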
    futures_db.drop_table_if_exist(table_name)

    futures_db.create_historical_table(table_name)

    rootPath = "/home/aouyang1/NinjaTrader/TickData/" + instrument
    fnames = os.listdir(rootPath)
    for fileNames in fnames:
        print(fileNames)
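        # tick files are semicolon-delimited (Date;Last;Volume) with 'YYYYMMDD HHMMSS' timestamps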
        df = pd.read_csv(rootPath + '/' + fileNames, delimiter=";", names=['Date', 'Last', 'Volume'], parse_dates=[0],
                         date_parser=lambda x: datetime.datetime.strptime(x, '%Y%m%d %H%M%S'))

        futures_db.upload_dataframe_to_table(df, table_name)

    futures_db.create_table_index(table_name, "Date")

futures_db.close_database_connection()
Example #5
        for fname in filelist:
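            # timestamps usually carry seconds and AM/PM; fall back to a minute-resolution format if parsing fails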
            try:
                df = pd.read_csv(rootPath + '/' + fname, parse_dates=[8, 9],
                                 date_parser=lambda x: datetime.datetime.strptime(x, '%m/%d/%Y %I:%M:%S %p'))
            except:
                df = pd.read_csv(rootPath + '/' + fname, parse_dates=[8, 9],
                                 date_parser=lambda x: datetime.datetime.strptime(x, '%m/%d/%Y %I:%M'))
                print('{} in PL {} has a different time format...'.format(fname, PL))

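            # drop the trailing empty 'Unnamed: 19' column when the file has one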
            try:
                df.drop('Unnamed: 19', axis=1, inplace=True)
            except:
                print('{} in PL {} has no Unnamed: 19 column...'.format(fname, PL))

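            # tag each row with its PL, move that column to the front, then apply the canonical column names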
            df['PL'] = PL
            cols = df.columns.tolist()
            cols = cols[-1:] + cols[:-1]
            df = df[cols]
            df.columns = col_names

            fdb.upload_dataframe_to_table(df, table_name)

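        # single-line progress indicator: current PL out of the largest value in plrange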
        stdout.write("\r%s" % table_name + " " + str(PL) + "/" + str(max(plrange)))
        stdout.flush()

    stdout.write("\n")
    fdb.create_table_index(table_name, "PL")

    print("---------------------")

fdb.close_database_connection()