def get_daily_pnl_snapshot(**kwargs):

    if "as_of_date" not in kwargs.keys():
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs["as_of_date"] = as_of_date
    else:
        as_of_date = kwargs["as_of_date"]

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext="ta")

    if os.path.isfile(ta_output_dir + "/portfolio_pnl.pkl"):
        strategy_frame = pd.read_pickle(ta_output_dir + "/portfolio_pnl.pkl")
        return strategy_frame

    strategy_frame = ts.get_open_strategies(**kwargs)
    pnl_output = [tapnl.get_strategy_pnl(alias=x, **kwargs) for x in strategy_frame["alias"]]

    strategy_frame["daily_pnl"] = [x["daily_pnl"] for x in pnl_output]
    strategy_frame["total_pnl"] = [x["total_pnl"] for x in pnl_output]

    strategy_frame = strategy_frame[["alias", "daily_pnl", "total_pnl"]]
    strategy_frame.sort_values("daily_pnl", ascending=False, inplace=True)
    strategy_frame.loc[max(strategy_frame.index) + 1] = [
        "TOTAL",
        strategy_frame["daily_pnl"].sum(),
        strategy_frame["total_pnl"].sum(),
    ]

    strategy_frame.to_pickle(ta_output_dir + "/portfolio_pnl.pkl")

    return strategy_frame
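
# Usage note (illustrative, not part of the original source): dates in this
# module are integer "doubledates" in yyyymmdd form, e.g. (hypothetical date)
#     snapshot = get_daily_pnl_snapshot(as_of_date=20180201)
# Repeated calls for the same date are served from the portfolio_pnl.pkl
# cache written on the first call.
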
def move_from_dated_folder_2daily_folder(**kwargs):

    ext = kwargs['ext']
    file_name_raw = kwargs['file_name']

    file_name_split = file_name_raw.split('.')

    if len(file_name_split) == 1:
        file_name = file_name_raw
        file_ext = '.xlsx'
    else:
        file_name = file_name_split[0]
        file_ext = '.' + file_name_split[1]

    if 'folder_date' in kwargs.keys():
        folder_date = kwargs['folder_date']
    else:
        folder_date = exp.doubledate_shift_bus_days()

    dated_folder = dn.get_dated_directory_extension(folder_date=folder_date,
                                                    ext=ext)

    sutil.copyfile(
        dated_folder + '/' + file_name + file_ext,
        daily_dir + '/' + file_name + '_' + str(folder_date) + file_ext)
def get_liquid_spread_frame(**kwargs):

    ticker_head_list = cmi.futures_butterfly_strategy_tickerhead_list

    data_dir = dn.get_dated_directory_extension(ext='intraday_ttapi_data', folder_date=kwargs['settle_date'])
    file_name = 'ContractList.csv'

    data_frame_out = pd.read_csv(data_dir + '/' + file_name)
    data_frame_out_filtered = data_frame_out[(data_frame_out['ProductType'] == 'SPREAD') &
                                             ((data_frame_out['InstrumentName'].str.contains('Calendar') &
                                               (data_frame_out['MarketKey'] == 'CME')) |
                                              (data_frame_out['InstrumentName'].str.contains('Spread') &
                                               (data_frame_out['MarketKey'] == 'ICE_IPE')))]

    num_contracts = len(data_frame_out_filtered.index)

    reformat_out_list = [tfl.get_ticker_from_tt_instrument_name_and_product_name(instrument_name=data_frame_out_filtered['InstrumentName'].iloc[x],
                                                                                 product_name=data_frame_out_filtered['ProductName'].iloc[x])
                         for x in range(num_contracts)]

    data_frame_out_filtered['ticker'] = [reformat_out_list[x]['ticker'] for x in range(num_contracts)]
    data_frame_out_filtered['ticker_head'] = [reformat_out_list[x]['ticker_head'] for x in range(num_contracts)]

    selection_indx = [data_frame_out_filtered['ticker_head'].iloc[x] in ticker_head_list for x in range(num_contracts)]
    data_frame_out_filtered2 = data_frame_out_filtered[selection_indx]

    data_frame_out_filtered2.sort_values(['ticker_head', 'Volume'], ascending=[True, False], inplace=True)
    return data_frame_out_filtered2
def load_options_backtesting_data(**kwargs):

    ticker = kwargs['ticker']
    settle_date = kwargs['settle_date']
    delta = kwargs['delta']

    if 'model' in kwargs.keys():
        model = kwargs['model']
    else:
        model = 'BS'

    option_data_dir = dn.get_dated_directory_extension(folder_date=settle_date, ext='options_backtesting_data')
    file_dir = ticker + '_' + model + '_' + str(delta) + '.mat'

    output_dictionary = {'final_pnl5': np.NaN, 'final_pnl10': np.NaN, 'final_pnl20': np.NaN,
                             'accumulated_theta_5': np.NaN, 'accumulated_theta_10': np.NaN, 'accumulated_theta_20': np.NaN}

    if os.path.isfile(option_data_dir+'/'+file_dir):

        try:
            mat_output = scipy.io.loadmat(option_data_dir+'/'+file_dir)
            paper_trader_output = mat_output['paperTraderOutput']

            output_dictionary = {'final_pnl5': paper_trader_output['finalPnl5'][0][0][0][0],
                             'final_pnl10': paper_trader_output['finalPnl10'][0][0][0][0],
                             'final_pnl20': paper_trader_output['finalPnl20'][0][0][0][0],
                             'accumulated_theta_5': paper_trader_output['accumulatedTheta5'][0][0][0][0],
                             'accumulated_theta_10': paper_trader_output['accumulatedTheta10'][0][0][0][0],
                             'accumulated_theta_20': paper_trader_output['accumulatedTheta20'][0][0][0][0]}
        except Exception:
            print('Cannot load ' + file_dir + ' !')

    return output_dictionary
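
# Note on the repeated [0][0][0][0] indexing above: scipy.io.loadmat returns
# MATLAB structs as nested numpy object arrays, so each scalar field needs
# several levels of unwrapping. A minimal, self-contained round trip
# illustrating the pattern (not part of the original source):
import io
import scipy.io

_buffer = io.BytesIO()
scipy.io.savemat(_buffer, {'paperTraderOutput': {'finalPnl5': 12.5}})
_buffer.seek(0)
_loaded = scipy.io.loadmat(_buffer)
assert _loaded['paperTraderOutput']['finalPnl5'][0][0][0][0] == 12.5
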
def get_logger(**kwargs):

    now_datetime = dt.datetime.now()

    folder_string = dn.get_dated_directory_extension(
        ext='log', folder_date=int(now_datetime.strftime('%Y%m%d')))

    file_identifier = kwargs['file_identifier']

    if 'log_level' in kwargs.keys():
        log_level = kwargs['log_level']
    else:
        log_level = 'WARNING'

    if log_level.upper() == 'CRITICAL':
        log_level = lgg.CRITICAL
    elif log_level.upper() == 'ERROR':
        log_level = lgg.ERROR
    elif log_level.upper() == 'WARNING':
        log_level = lgg.WARNING
    elif log_level.upper() == 'INFO':
        log_level = lgg.INFO
    elif log_level.upper() == 'DEBUG':
        log_level = lgg.DEBUG

    logger = lgg.getLogger(__name__)
    logger.setLevel(log_level)

    handler = lgg.FileHandler(folder_string + '/' +
                              now_datetime.strftime('%Y%m%d_%H%M%S') + '_' +
                              file_identifier + '.log')
    handler.setLevel(log_level)

    logger.addHandler(handler)
    return logger
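
# Illustrative usage (not part of the original source), mirroring the calls
# that appear later in this file, e.g. in the IB delta-hedge entry point:
#     log = get_logger(file_identifier='ib_delta_hedge', log_level='INFO')
#     log.info('delta hedge run started')
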
def generate_historic_risk_report(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext='ta')

    historic_risk_output = hr.get_historical_risk_4open_strategies(**kwargs)

    strategy_risk_frame = historic_risk_output['strategy_risk_frame']
    ticker_head_risk_frame = historic_risk_output['ticker_head_risk_frame']

    writer = pd.ExcelWriter(ta_output_dir + '/risk.xlsx', engine='xlsxwriter')

    strategy_risk_frame.to_excel(writer, sheet_name='strategies')
    ticker_head_risk_frame.to_excel(writer, sheet_name='tickerHeads')

    worksheet_strategies = writer.sheets['strategies']
    worksheet_ticker_heads = writer.sheets['tickerHeads']

    worksheet_strategies.freeze_panes(1, 0)
    worksheet_ticker_heads.freeze_panes(1, 0)

    worksheet_strategies.autofilter(0, 0, len(strategy_risk_frame.index),
                              len(strategy_risk_frame.columns))

    worksheet_ticker_heads.autofilter(0, 0, len(ticker_head_risk_frame.index),
                                   len(ticker_head_risk_frame.columns))

    writer.save()
def get_liquid_outright_futures_frame(**kwargs):

    ticker_head_list = list(set(cmi.futures_butterfly_strategy_tickerhead_list) | set(cmi.cme_futures_tickerhead_list))

    data_dir = dn.get_dated_directory_extension(ext="intraday_ttapi_data", folder_date=kwargs["settle_date"])
    file_name = "ContractList.csv"

    data_frame_out = pd.read_csv(data_dir + "/" + file_name)
    data_frame_out_filtered = data_frame_out[data_frame_out["ProductType"] == "FUTURE"]
    num_contracts = len(data_frame_out_filtered.index)

    reformat_out_list = [
        tfl.get_ticker_from_tt_instrument_name_and_product_name(
            instrument_name=data_frame_out_filtered["InstrumentName"].iloc[x],
            product_name=data_frame_out_filtered["ProductName"].iloc[x],
        )
        for x in range(num_contracts)
    ]

    data_frame_out_filtered["ticker"] = [reformat_out_list[x]["ticker"] for x in range(num_contracts)]
    data_frame_out_filtered["ticker_head"] = [reformat_out_list[x]["ticker_head"] for x in range(num_contracts)]

    selection_indx = [data_frame_out_filtered["ticker_head"].iloc[x] in ticker_head_list for x in range(num_contracts)]
    data_frame_out_filtered2 = data_frame_out_filtered[selection_indx]

    data_frame_out_filtered2.sort_values(["ticker_head", "Volume"], ascending=[True, False], inplace=True)
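    # Because rows are sorted by Volume in descending order within each
    # ticker_head, drop_duplicates below keeps the most liquid contract
    # per ticker_head.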
    return data_frame_out_filtered2.drop_duplicates("ticker_head")
def send_wh_report(**kwargs):

    if 'report_date' in kwargs.keys():
        report_date = kwargs['report_date']
    else:
        report_date = exp.doubledate_shift_bus_days()

    ta_output_dir = dn.get_dated_directory_extension(folder_date=report_date, ext='ta')

    strategy_frame = tpm.get_daily_pnl_snapshot(as_of_date=report_date, name='final')
    total_pnl_row = strategy_frame[strategy_frame.alias == 'TOTAL']

    report_date_str = cu.convert_datestring_format({'date_string': str(report_date), 'format_from': 'yyyymmdd', 'format_to': 'dd/mm/yyyy'})

    config_output = su.read_config_file(file_name=dna.get_directory_name(ext='daily') + '/riskAndMargin.txt')

    email_text = "Expected Maximum Drawdown: "  + config_output['emd'] + "K" + \
                 "\nMargin: " + str(int(config_output['iceMargin']) + int(config_output['cmeMargin'])) + "K" + \
                 "\nNet Liquidating Value: " + config_output['pnl'] + "K" + \
                 "\n \nSee attached for individual strategy pnls."

    se.send_email_with_attachment(send_from='*****@*****.**',
                                  send_to=['*****@*****.**','*****@*****.**'],
                                  sender_account_alias='wh_trading',
                                  subject='Daily PnL for ' + report_date_str + ' is: ' + '${:,}'.format(total_pnl_row['daily_pnl'].iloc[0]),
                                  email_text=email_text,
                                  attachment_list = [ta_output_dir + '/' + 'pnl_final.xlsx'],
                                  attachment_name_list=['PnLs.xlsx'])
def generate_spread_carry_followup_report(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    con = msu.get_my_sql_connection(**kwargs)

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date,
                                                     ext='ta')

    if 'writer' in kwargs.keys():
        writer = kwargs['writer']
    else:
        writer = pd.ExcelWriter(ta_output_dir + '/followup.xlsx',
                                engine='xlsxwriter')

    strategy_frame = ts.get_open_strategies(**kwargs)

    strategy_class_list = [
        sc.convert_from_string_to_dictionary(
            string_input=strategy_frame['description_string'][x])
        ['strategy_class'] for x in range(len(strategy_frame.index))
    ]

    spread_carry_indx = [x == 'spread_carry' for x in strategy_class_list]
    spread_carry_frame = strategy_frame[spread_carry_indx]

    if spread_carry_frame.empty:
        return writer

    results = [
        sf.get_results_4strategy(
            alias=spread_carry_frame['alias'].iloc[x],
            strategy_info_output=spread_carry_frame.iloc[x],
            con=con) for x in range(len(spread_carry_frame.index))
    ]

    results_frame_list = [
        results[x]['results_frame'] for x in range(len(results))
        if results[x]['success']
    ]
    spread_carry_followup_frame = pd.concat(results_frame_list)

    spread_carry_followup_frame.to_excel(writer, sheet_name='sc')
    worksheet_sc = writer.sheets['sc']
    worksheet_sc.freeze_panes(1, 0)

    worksheet_sc.autofilter(0, 0, len(spread_carry_followup_frame.index),
                            len(spread_carry_followup_frame.columns))

    if 'con' not in kwargs.keys():
        con.close()

    return writer
def get_tagged_tt_fills(**kwargs):

    fill_frame = load_latest_tt_fills(**kwargs)

    str_indx = fill_frame['Contract'].values[0].find('-')

    if str_indx == 2:
        date_format = '%y-%b'
    elif str_indx == -1:
        date_format = '%b%y'
    else:
        raise ValueError('Unrecognized contract name format: ' + fill_frame['Contract'].values[0])

    datetime_conversion = [dt.datetime.strptime(x,date_format) for x in fill_frame['Contract']]
    fill_frame['ticker_year'] = [x.year for x in datetime_conversion]
    fill_frame['ticker_month'] = [x.month for x in datetime_conversion]
    fill_frame['ticker_head'] = [conversion_from_tt_ticker_head[x] for x in fill_frame['Product']]

    fill_frame['ticker'] = [fill_frame.loc[x,'ticker_head'] +
                            cmi.full_letter_month_list[fill_frame.loc[x,'ticker_month']-1] +
                            str(fill_frame.loc[x,'ticker_year']) for x in fill_frame.index]

    fill_frame['trade_price'] = [convert_trade_price_from_tt(price=fill_frame.loc[x,'Price'],ticker_head=fill_frame.loc[x,'ticker_head'])
                                 for x in fill_frame.index]

    fill_frame['PQ'] = fill_frame['trade_price']*fill_frame['Qty']

    grouped = fill_frame.groupby(['ticker','B/S', 'Order Tag'])

    aggregate_trades = pd.DataFrame()
    aggregate_trades['trade_price'] = grouped['PQ'].sum()/grouped['Qty'].sum()
    aggregate_trades['trade_quantity'] = grouped['Qty'].sum()

    aggregate_trades.loc[(slice(None), 'S'), 'trade_quantity'] = -aggregate_trades.loc[(slice(None), 'S'), 'trade_quantity']
    aggregate_trades['ticker'] = grouped['ticker'].first()
    aggregate_trades['ticker_head'] = grouped['ticker_head'].first()
    aggregate_trades['order_tag'] = grouped['Order Tag'].first()
    aggregate_trades['instrument'] = [product_type_instrument_conversion[x] for x in grouped['Product Type'].first()]

    aggregate_trades['option_type'] = None
    aggregate_trades['strike_price'] = None
    aggregate_trades['real_tradeQ'] = True

    ta_directory = dn.get_dated_directory_extension(ext='ta', folder_date=cu.get_doubledate())
    trade_alias_frame = pd.read_csv(ta_directory + '/tradeAlias.csv')

    combined_list = [None]*len(trade_alias_frame.index)

    for i in range(len(trade_alias_frame.index)):

        selected_trades = aggregate_trades[aggregate_trades['order_tag'] == trade_alias_frame['tag'].iloc[i]]
        combined_list[i] = selected_trades[['ticker', 'option_type', 'strike_price', 'trade_price', 'trade_quantity', 'instrument', 'real_tradeQ']].copy()
        combined_list[i]['alias'] = trade_alias_frame['alias'].iloc[i]

    aggregate_trades = pd.concat(combined_list).reset_index(drop=True)

    return {'raw_trades': fill_frame, 'aggregate_trades': aggregate_trades}
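
# The grouped PQ / Qty division above is a volume-weighted average price
# (VWAP) per (ticker, B/S, Order Tag) group. A minimal standalone sketch
# with hypothetical fills (ticker and tag values are illustrative only):
import pandas as pd

_fills = pd.DataFrame({'ticker': ['CLZ0', 'CLZ0'],
                       'B/S': ['B', 'B'],
                       'Order Tag': ['tag1', 'tag1'],
                       'Price': [70.0, 71.0],
                       'Qty': [1, 3]})
_fills['PQ'] = _fills['Price'] * _fills['Qty']
_grouped = _fills.groupby(['ticker', 'B/S', 'Order Tag'])
print(_grouped['PQ'].sum() / _grouped['Qty'].sum())  # 70.75 = (70*1 + 71*3) / 4
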
def test_ib_order_follow_up():
    app = ib_order_follow_up()
    date_now = cu.get_doubledate()
    con = msu.get_my_sql_connection()
    delta_strategy_frame = ts.get_filtered_open_strategies(as_of_date=date_now, con=con, strategy_class_list=['delta'])
    app.delta_alias = delta_strategy_frame['alias'].iloc[-1]
    ta_folder = dn.get_dated_directory_extension(folder_date=date_now, ext='ta')
    app.trade_file = ta_folder + '/trade_dir.csv'
    app.con = con

    app.connect(client_id=7)
    app.run()
def generate_underlying_proxy_report(**kwargs):

    con = msu.get_my_sql_connection(**kwargs)
    report_date = kwargs['report_date']
    futures_dataframe = cl.generate_futures_list_dataframe(date_to=report_date)
    futures_flat_curve = futures_dataframe[
        futures_dataframe['ticker_class'].isin(
            ['FX', 'Metal', 'Treasury', 'Index'])]
    futures_flat_curve.reset_index(drop=True, inplace=True)

    futures_flat_curve['tr_days_2roll'] = futures_flat_curve.apply(
        lambda x: exp.get_days2_roll(ticker=x['ticker'],
                                     instrument='Futures',
                                     date_to=report_date,
                                     con=con)['tr_days_2roll'],
        axis=1)

    futures_data_dictionary = {
        x: gfp.get_futures_price_preloaded(ticker_head=x)
        for x in futures_flat_curve['ticker_head'].unique()
    }

    proxy_output_list = [
        get_underlying_proxy_ticker(
            ticker=futures_flat_curve['ticker'].iloc[x],
            settle_date=report_date,
            con=con,
            futures_data_dictionary=futures_data_dictionary)
        for x in range(len(futures_flat_curve.index))
    ]

    futures_flat_curve['proxy_ticker'] = [
        x['ticker'] for x in proxy_output_list
    ]
    futures_flat_curve['add_2_proxy'] = [
        x['add_2_proxy'] for x in proxy_output_list
    ]

    ta_output_dir = dn.get_dated_directory_extension(folder_date=report_date,
                                                     ext='ta')
    writer = pd.ExcelWriter(ta_output_dir + '/proxy_report.xlsx',
                            engine='xlsxwriter')
    futures_flat_curve = futures_flat_curve[[
        'ticker', 'ticker_head', 'ticker_class', 'volume', 'tr_days_2roll',
        'proxy_ticker', 'add_2_proxy'
    ]]

    futures_flat_curve.to_excel(writer, sheet_name='proxy_report')
    writer.save()

    if 'con' not in kwargs.keys():
        con.close()
def read_cme_option_settle_csv_files(**kwargs):

    file_name = kwargs['file_name']
    report_date = kwargs['report_date']

    options_data_dir = dn.get_dated_directory_extension(folder_date=report_date, ext='raw_options_data')

    with open(options_data_dir + '/' + file_name + '.pkl', 'rb') as handle:
        raw_data = pickle.load(handle)

    raw_data['ticker_head'] = [convert_from_cme_product_symbol_2tickerhead(x) for x in raw_data['PRODUCT SYMBOL']]

    return raw_data
def get_ticker_frame(**kwargs):

    date = kwargs['date']

    folder_name = dn.get_dated_directory_extension(folder_date=date,
                                                   ext='binance')

    try:
        frame_out = pd.read_pickle(folder_name + '/ticker_frame.pkl')
    except Exception:
        frame_out = pd.DataFrame(columns=['symbol'])

    return frame_out
def send_followup_report(**kwargs):

    if 'report_date' in kwargs.keys():
        report_date = kwargs['report_date']
    else:
        report_date = exp.doubledate_shift_bus_days()

    ta_output_dir = dn.get_dated_directory_extension(folder_date=report_date,ext='ta')

    se.send_email_with_attachment(send_from='*****@*****.**',
                                  send_to='*****@*****.**',
                                  sender_account_alias='wh_trading',
                                  subject='followup_' + str(report_date),
                                  attachment_list = [ta_output_dir + '/' + 'followup.xlsx'],
                                  attachment_name_list=['Followup.xlsx'])
def save_ticker_frame(**kwargs):

    if 'client' in kwargs.keys():
        client = kwargs['client']
    else:
        client = btu.get_binance_client()

    folder_date = cu.get_doubledate()

    folder_name = dn.get_dated_directory_extension(folder_date=folder_date,
                                                   ext='binance')
    tickers = client.get_ticker()
    ticker_frame = pd.DataFrame(tickers)

    ticker_frame.to_pickle(folder_name + '/ticker_frame.pkl')
def get_binance_price_preloaded(**kwargs):

    interval = kwargs['interval']
    ticker = kwargs['ticker']
    date_from = kwargs['date_from']
    date_to = kwargs['date_to']

    file_name = ticker + '_1h.pkl'

    datetime_from = cu.convert_doubledate_2datetime(date_from)
    datetime_to = cu.convert_doubledate_2datetime(date_to)

    x = datetime_from

    price_frame_list = []

    while x <= datetime_to:

        folder_name = dn.get_dated_directory_extension(folder_date=int(
            x.strftime('%Y%m%d')),
                                                       ext='binance')
        dated_file_name = folder_name + '/' + file_name
        if os.path.isfile(dated_file_name):
            price_frame_list.append(pd.read_pickle(dated_file_name))

        x = x + dt.timedelta(days=1)

    if len(price_frame_list) == 0:
        return pd.DataFrame()
    merged_data = pd.concat(price_frame_list)

    if len(merged_data.index) == 0:
        return pd.DataFrame()

    merged_data.set_index('openDatetime', drop=True, inplace=True)

    if interval.upper() != '1H':
        data_out = pd.DataFrame()
        data_out['open'] = merged_data['open'].resample('4H').first()
        data_out['close'] = merged_data['close'].resample('4H').last()
        data_out['high'] = merged_data['high'].resample('4H').max()
        data_out['low'] = merged_data['low'].resample('4H').min()
        data_out['volume'] = merged_data['volume'].resample('4H').sum()
    else:
        data_out = merged_data

    return data_out
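
# Note (not part of the original source): the non-'1H' branch above resamples
# to a hardcoded '4H' bar regardless of the requested interval. An equivalent,
# more compact form would be:
#     data_out = merged_data.resample('4H').agg({'open': 'first', 'close': 'last',
#                                                'high': 'max', 'low': 'min',
#                                                'volume': 'sum'})
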
def download_coinbase_data_4date(**kwargs):

    utc_date = kwargs['utc_date']
    print(utc_date)
    candlestick_minutes = kwargs['candlestick_minutes']
    ticker = kwargs['ticker']

    coinbase_data_dir = dn.get_dated_directory_extension(folder_date=utc_date,
                                                         ext='coinbase_data')
    file_name = coinbase_data_dir + '/' + ticker + '_' + str(
        candlestick_minutes) + '.pkl'

    if os.path.isfile(file_name):
        return pd.read_pickle(file_name)

    if 'coin_client' in kwargs.keys():
        coin_client = kwargs['coin_client']
    else:
        coin_client = coin_util.get_coinbase_client()

    start_date = cu.convert_doubledate_2datetime(utc_date)
    start_date = start_date.replace(hour=0, minute=0)
    end_date = start_date + dt.timedelta(days=1)
    end_date = end_date.replace(hour=0, minute=5)

    date_raw = coin_client.get_product_historic_rates(
        ticker,
        granularity=candlestick_minutes * 60,
        start=start_date,
        end=end_date)
    time.sleep(0.5)

    frame_out = pd.DataFrame(
        date_raw, columns=['time', 'low', 'high', 'open', 'close', 'volume'])

    frame_out['time'] = [
        dt.datetime.utcfromtimestamp(x).replace(tzinfo=pytz.utc)
        for x in frame_out['time']
    ]

    frame_out.sort_values(by='time', ascending=True, inplace=True)
    frame_out.reset_index(drop=True, inplace=True)
    frame_out.to_pickle(file_name)

    return frame_out
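
# Illustrative call (hypothetical date), assuming the module aliases above;
# granularity is passed to Coinbase in seconds, hence the * 60 in the request:
#     frame = download_coinbase_data_4date(utc_date=20200115,
#                                          candlestick_minutes=5,
#                                          ticker='BTC-USD')
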
def generate_spread_carry_followup_report(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    con = msu.get_my_sql_connection(**kwargs)

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext='ta')

    if 'writer' in kwargs.keys():
        writer = kwargs['writer']
    else:
        writer = pd.ExcelWriter(ta_output_dir + '/followup.xlsx', engine='xlsxwriter')

    strategy_frame = ts.get_open_strategies(**kwargs)

    strategy_class_list = [sc.convert_from_string_to_dictionary(string_input=strategy_frame['description_string'][x])['strategy_class']
                           for x in range(len(strategy_frame.index))]

    spread_carry_indx = [x == 'spread_carry' for x in strategy_class_list]
    spread_carry_frame = strategy_frame[spread_carry_indx]

    results = [sf.get_results_4strategy(alias=spread_carry_frame['alias'].iloc[x],
                                        strategy_info_output=spread_carry_frame.iloc[x],
                                        con=con)
               for x in range(len(spread_carry_frame.index))]

    results_frame_list = [results[x]['results_frame'] for x in range(len(results)) if results[x]['success']]
    spread_carry_followup_frame = pd.concat(results_frame_list)

    spread_carry_followup_frame.to_excel(writer, sheet_name='sc')
    worksheet_sc = writer.sheets['sc']
    worksheet_sc.freeze_panes(1, 0)

    worksheet_sc.autofilter(0, 0, len(spread_carry_followup_frame.index),
                              len(spread_carry_followup_frame.columns))

    if 'con' not in kwargs.keys():
        con.close()

    return writer
def generate_portfolio_pnl_report(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext='ta')

    daily_pnl_frame = tpm.get_daily_pnl_snapshot(**kwargs)

    writer = pd.ExcelWriter(ta_output_dir + '/pnl.xlsx', engine='xlsxwriter')
    daily_pnl_frame.to_excel(writer, sheet_name='strategies')
    worksheet_strategies = writer.sheets['strategies']

    worksheet_strategies.set_column('B:B', 30)
    worksheet_strategies.freeze_panes(1, 0)
    worksheet_strategies.autofilter(0, 0, len(daily_pnl_frame.index),
                              len(daily_pnl_frame.columns))
def save_price_data(**kwargs):

    if 'client' in kwargs.keys():
        client = kwargs['client']
    else:
        client = btu.get_binance_client()

    interval = kwargs['interval']
    ticker = kwargs['ticker']
    date_from = kwargs['date_from']
    date_to = kwargs['date_to']

    file_name = ticker + '_' + interval + '.pkl'

    datetime_from = cu.convert_doubledate_2datetime(date_from)
    datetime_to = cu.convert_doubledate_2datetime(date_to)

    x = datetime_from

    while x <= datetime_to:

        xplus = x + dt.timedelta(days=1)

        folder_name = dn.get_dated_directory_extension(folder_date=int(
            x.strftime('%Y%m%d')),
                                                       ext='binance')
        dated_file_name = folder_name + '/' + file_name
        if os.path.isfile(dated_file_name):
            price_frame = pd.read_pickle(dated_file_name)
            print(len(price_frame.index))
        else:
            price_frame = gbp.get_klines(ticker=ticker,
                                         interval=interval,
                                         start_str=x.strftime('%m/%d/%y'),
                                         end_str=xplus.strftime('%m/%d/%y'),
                                         client=client)
            price_frame = price_frame[price_frame['openDate'] == x.date()]
            price_frame.to_pickle(dated_file_name)
            tm.sleep(0.5)

        x = xplus
def load_csv_file_4ticker(**kwargs):

    ticker = kwargs['ticker']

    if 'folder_date' in kwargs.keys():
        folder_date = kwargs['folder_date']
    else:
        folder_date = exp.doubledate_shift_bus_days()

    data_dir = dn.get_dated_directory_extension(ext='intraday_ttapi_data', folder_date=folder_date)

    file_name = get_ttapi_filename(ticker=ticker)

    if os.path.isfile(data_dir + '/' + file_name):
        data_frame_out = pd.read_csv(data_dir + '/' + file_name,names=['time','field','value'],dtype={2: 'str'})
        data_frame_out['time'] = pd.to_datetime(data_frame_out['time'])
    else:
        data_frame_out = pd.DataFrame(columns=['time','field','value'])


    return data_frame_out
def main():

    app = dh_algo.Algo()
    con = msu.get_my_sql_connection()
    date_now = cu.get_doubledate()

    contract_frame = tsh.get_intraday_data_contract_frame(con=con)

    contract_frame = dh_ut.calculate_contract_risk(
        contract_frame=contract_frame, current_date=date_now)

    contract_frame['bid_p'] = np.nan
    contract_frame['ask_p'] = np.nan
    contract_frame['mid_price'] = np.nan
    contract_frame['spread_cost'] = np.nan

    contract_frame['bid_q'] = np.nan
    contract_frame['ask_q'] = np.nan

    contract_frame_outright = contract_frame[contract_frame['is_spread_q'] ==
                                             False]
    outright_ticker_list = list(contract_frame_outright['ticker'].values)
    contract_frame_spread = contract_frame[contract_frame['is_spread_q']]

    ta_folder = dn.get_dated_directory_extension(folder_date=date_now,
                                                 ext='ta')

    app.ticker_list = outright_ticker_list
    app.price_request_dictionary['spread'] = list(
        contract_frame_spread['ticker'].values)
    app.price_request_dictionary['outright'] = outright_ticker_list
    app.contract_frame = contract_frame

    app.log = lg.get_logger(file_identifier='ib_delta_hedge', log_level='INFO')
    app.trade_file = ta_folder + '/trade_dir.csv'
    app.delta_alias = tsh.get_delta_strategy_alias(con=con)
    app.con = con
    app.current_date = date_now
    app.connect(client_id=6)
    app.run()
def get_liquid_outright_futures_frame(**kwargs):

    ticker_head_list = list(set(cmi.futures_butterfly_strategy_tickerhead_list) | set(cmi.cme_futures_tickerhead_list))

    data_dir = dn.get_dated_directory_extension(ext='intraday_ttapi_data', folder_date=kwargs['settle_date'])
    file_name = 'ContractList.csv'

    data_frame_out = pd.read_csv(data_dir + '/' + file_name)
    data_frame_out_filtered = data_frame_out[data_frame_out['ProductType'] == 'FUTURE']
    num_contracts = len(data_frame_out_filtered.index)

    reformat_out_list = [tfl.get_ticker_from_tt_instrument_name_and_product_name(instrument_name=data_frame_out_filtered['InstrumentName'].iloc[x],
                                                                                 product_name=data_frame_out_filtered['ProductName'].iloc[x])
                         for x in range(num_contracts)]

    data_frame_out_filtered['ticker'] = [reformat_out_list[x]['ticker'] for x in range(num_contracts)]
    data_frame_out_filtered['ticker_head'] = [reformat_out_list[x]['ticker_head'] for x in range(num_contracts)]

    selection_indx = [data_frame_out_filtered['ticker_head'].iloc[x] in ticker_head_list for x in range(num_contracts)]
    data_frame_out_filtered2 = data_frame_out_filtered[selection_indx]

    data_frame_out_filtered2.sort_values(['ticker_head','Volume'],ascending=[True, False],inplace=True)
    return data_frame_out_filtered2.drop_duplicates('ticker_head')
def get_daily_pnl_snapshot(**kwargs):

    if 'as_of_date' not in kwargs.keys():
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date
    else:
        as_of_date = kwargs['as_of_date']

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date,
                                                     ext='ta')

    file_name = '/portfolio_pnl_' + kwargs['name'] + '.pkl'

    if os.path.isfile(ta_output_dir + file_name):
        strategy_frame = pd.read_pickle(ta_output_dir + file_name)
        return strategy_frame

    strategy_frame = ts.get_open_strategies(**kwargs)
    pnl_output = [
        tapnl.get_strategy_pnl(alias=x, **kwargs)
        for x in strategy_frame['alias']
    ]

    strategy_frame['daily_pnl'] = [x['daily_pnl'] for x in pnl_output]
    strategy_frame['total_pnl'] = [x['total_pnl'] for x in pnl_output]

    strategy_frame = strategy_frame[['alias', 'daily_pnl', 'total_pnl']]
    strategy_frame.sort_values('daily_pnl', ascending=False, inplace=True)

    if len(strategy_frame.index) > 0:
        strategy_frame.loc[max(strategy_frame.index) + 1] = [
            'TOTAL', strategy_frame['daily_pnl'].sum(),
            strategy_frame['total_pnl'].sum()
        ]

    strategy_frame.to_pickle(ta_output_dir + file_name)

    return strategy_frame
def load_csv_file_4ticker(**kwargs):

    ticker = kwargs['ticker']

    if 'folder_date' in kwargs.keys():
        folder_date = kwargs['folder_date']
    else:
        folder_date = exp.doubledate_shift_bus_days()

    data_dir = dn.get_dated_directory_extension(ext='intraday_ttapi_data',
                                                folder_date=folder_date)

    file_name = tfl.convert_ticker_from_db2tt(ticker) + '.csv'
    #print(file_name)

    if os.path.isfile(data_dir + '/' + file_name):
        data_frame_out = pd.read_csv(data_dir + '/' + file_name,
                                     names=['time', 'field', 'value'],
                                     dtype={2: 'str'})
        data_frame_out['time'] = pd.to_datetime(data_frame_out['time'])
    else:
        data_frame_out = pd.DataFrame(columns=['time', 'field', 'value'])

    return data_frame_out
def move_from_dated_folder_2daily_folder(**kwargs):

    ext = kwargs["ext"]
    file_name_raw = kwargs["file_name"]

    file_name_split = file_name_raw.split(".")

    if len(file_name_split) == 1:
        file_name = file_name_raw
        file_ext = ".xlsx"
    else:
        file_name = file_name_split[0]
        file_ext = "." + file_name_split[1]

    if "folder_date" in kwargs.keys():
        folder_date = kwargs["folder_date"]
    else:
        folder_date = exp.doubledate_shift_bus_days()

    dated_folder = dn.get_dated_directory_extension(folder_date=folder_date, ext=ext)

    sutil.copyfile(
        dated_folder + "/" + file_name + file_ext, daily_dir + "/" + file_name + "_" + str(folder_date) + file_ext
    )
def get_presaved_coinbase_data(**kwargs):

    date_to = kwargs["date_to"]
    num_days_back = kwargs["num_days_back"]
    datetime_to = cu.convert_doubledate_2datetime(date_to)
    date_list = [
        int((datetime_to - dt.timedelta(days=x)).strftime('%Y%m%d'))
        for x in range(0, num_days_back)
    ]
    data_list = []

    for i in range(len(date_list)):

        coinbase_data_dir = dn.get_dated_directory_extension(
            folder_date=date_list[i], ext='coinbase_data')
        data_list.append(
            pd.read_pickle(coinbase_data_dir + '/' + kwargs["ticker"] + '_' +
                           str(kwargs["candlestick_minutes"]) + '.pkl'))

    frame_out = pd.concat(data_list)

    frame_out.drop_duplicates(subset=['time'], keep='first', inplace=True)
    frame_out.sort_values(by='time', ascending=True, inplace=True)
    return frame_out.reset_index(drop=True, inplace=False)
def generate_futures_butterfly_followup_report(**kwargs):

    con = msu.get_my_sql_connection(**kwargs)

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    if 'writer' in kwargs.keys():
        writer = kwargs['writer']
    else:
        ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext='ta')
        writer = pd.ExcelWriter(ta_output_dir + '/followup.xlsx', engine='xlsxwriter')

    strategy_frame = ts.get_open_strategies(**kwargs)

    strategy_class_list = [sc.convert_from_string_to_dictionary(string_input=strategy_frame['description_string'][x])['strategy_class']
                           for x in range(len(strategy_frame.index))]

    futures_butterfly_indx = [x == 'futures_butterfly' for x in strategy_class_list]

    futures_butterfly_frame = strategy_frame[futures_butterfly_indx]

    results = [sf.get_results_4strategy(alias=futures_butterfly_frame['alias'].iloc[x],
                                        strategy_info_output=futures_butterfly_frame.iloc[x])
               for x in range(len(futures_butterfly_frame.index))]

    butterfly_followup_frame = pd.DataFrame(results)
    butterfly_followup_frame['alias'] = futures_butterfly_frame['alias'].values

    pnl_frame = pm.get_daily_pnl_snapshot(as_of_date=as_of_date, con=con)
    risk_output = hr.get_historical_risk_4open_strategies(as_of_date=as_of_date, con=con)

    merged_frame1 = pd.merge(butterfly_followup_frame,pnl_frame, how='left', on='alias')
    merged_frame2 = pd.merge(merged_frame1, risk_output['strategy_risk_frame'], how='left', on='alias')

    butterfly_followup_frame = merged_frame2[['alias', 'ticker_head', 'holding_tr_dte', 'short_tr_dte',
                                                         'z1_initial', 'z1', 'QF_initial', 'QF',
                                                         'total_pnl', 'downside','recommendation']]

    butterfly_followup_frame.rename(columns={'alias': 'Alias', 'ticker_head': 'TickerHead',
                                             'holding_tr_dte': 'HoldingTrDte', 'short_tr_dte': 'ShortTrDte',
                                             'z1_initial': 'Z1Initial', 'z1': 'Z1',
                                             'QF_initial': 'QFInitial','total_pnl': 'TotalPnl',
                                             'downside': 'Downside','recommendation':'Recommendation'}, inplace=True)

    butterfly_followup_frame.sort_values('QF', ascending=False, inplace=True)

    butterfly_followup_frame['Z1'] = butterfly_followup_frame['Z1'].round(2)

    butterfly_followup_frame.to_excel(writer, sheet_name='butterflies')
    worksheet_butterflies = writer.sheets['butterflies']

    worksheet_butterflies.set_column('B:B', 26)
    worksheet_butterflies.freeze_panes(1, 0)

    worksheet_butterflies.autofilter(0, 0, len(butterfly_followup_frame.index),
                              len(butterfly_followup_frame.columns))

    if 'con' not in kwargs.keys():
        con.close()

    return writer
def generate_vcs_followup_report(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext='ta')

    con = msu.get_my_sql_connection(**kwargs)

    if 'writer' in kwargs.keys():
        writer = kwargs['writer']
    else:
        writer = pd.ExcelWriter(ta_output_dir + '/followup.xlsx', engine='xlsxwriter')

    strategy_frame = ts.get_open_strategies(**kwargs)

    strategy_class_list = [sc.convert_from_string_to_dictionary(string_input=strategy_frame['description_string'][x])['strategy_class']
                           for x in range(len(strategy_frame.index))]

    vcs_indx = [x == 'vcs' for x in strategy_class_list]
    vcs_frame = strategy_frame[vcs_indx]

    results = [sf.get_results_4strategy(alias=vcs_frame['alias'].iloc[x],
                                        strategy_info_output=vcs_frame.iloc[x])
               for x in range(len(vcs_frame.index))]

    vcs_followup_frame = pd.DataFrame(results)
    vcs_followup_frame['alias'] = vcs_frame['alias'].values

    pnl_frame = pm.get_daily_pnl_snapshot(**kwargs)
    merged_frame1 = pd.merge(vcs_followup_frame,pnl_frame, how='left', on='alias')

    vcs_followup_frame = merged_frame1[['alias', 'last_adjustment_days_ago','min_tr_dte', 'long_short_ratio',
                   'net_oev', 'net_theta', 'long_oev', 'short_oev', 'favQMove', 'total_pnl','recommendation']]

    vcs_followup_frame['long_short_ratio'] = vcs_followup_frame['long_short_ratio'].round()
    vcs_followup_frame['net_oev'] = vcs_followup_frame['net_oev'].round(1)
    vcs_followup_frame['long_oev'] = vcs_followup_frame['long_oev'].round(1)
    vcs_followup_frame['short_oev'] = vcs_followup_frame['short_oev'].round(1)
    vcs_followup_frame['net_theta'] = vcs_followup_frame['net_theta'].round(1)

    vcs_followup_frame.sort_values('total_pnl', ascending=False, inplace=True)
    vcs_followup_frame.reset_index(drop=True,inplace=True)
    vcs_followup_frame.loc[len(vcs_followup_frame.index)] = ['TOTAL', None, None, None, None, vcs_followup_frame['net_theta'].sum(),
                                                             None, None, None, vcs_followup_frame['total_pnl'].sum(), None]

    vcs_followup_frame.to_excel(writer, sheet_name='vcs')
    worksheet_vcs = writer.sheets['vcs']
    worksheet_vcs.set_column('B:B', 18)
    worksheet_vcs.freeze_panes(1, 0)

    worksheet_vcs.autofilter(0, 0, len(vcs_followup_frame.index),
                              len(vcs_followup_frame.columns))

    if 'con' not in kwargs.keys():
        con.close()

    writer.save()
def main():
    app = algo.Algo()
    report_date = exp.doubledate_shift_bus_days()
    todays_date = cu.get_doubledate()
    con = msu.get_my_sql_connection()
    vcs_output = vcs.generate_vcs_sheet_4date(date_to=report_date)
    vcs_pairs = vcs_output['vcs_pairs']

    filter_out = of.get_vcs_filters(data_frame_input=vcs_pairs,
                                    filter_list=['long2', 'short2'])
    vcs_pairs = filter_out['selected_frame']

    vcs_pairs = vcs_pairs[vcs_pairs['downside'].notnull()
                          & vcs_pairs['upside'].notnull()]
    # &(vcs_pairs.tickerClass!='Energy')
    vcs_pairs = vcs_pairs[(vcs_pairs['trDte1'] >= 50)
                          & (vcs_pairs.tickerClass != 'Metal') &
                          (vcs_pairs.tickerClass != 'FX') &
                          (vcs_pairs.tickerClass != 'Energy')]
    vcs_pairs = vcs_pairs[((vcs_pairs['Q'] <= 30) &
                           (vcs_pairs['fwdVolQ'] >= 30)) |
                          ((vcs_pairs['Q'] >= 70) &
                           (vcs_pairs['fwdVolQ'] <= 70))]
    vcs_pairs.reset_index(drop=True, inplace=True)

    vcs_pairs['underlying_ticker1'] = [
        omu.get_option_underlying(ticker=x) for x in vcs_pairs['ticker1']
    ]
    vcs_pairs['underlying_ticker2'] = [
        omu.get_option_underlying(ticker=x) for x in vcs_pairs['ticker2']
    ]

    vcs_pairs['underlying_tickerhead'] = [
        cmi.get_contract_specs(x)['ticker_head']
        for x in vcs_pairs['underlying_ticker1']
    ]
    futures_data_dictionary = {
        x: gfp.get_futures_price_preloaded(ticker_head=x)
        for x in vcs_pairs['underlying_tickerhead'].unique()
    }

    proxy_output_list1 = [
        up.get_underlying_proxy_ticker(
            ticker=x,
            settle_date=report_date,
            futures_data_dictionary=futures_data_dictionary)
        for x in vcs_pairs['underlying_ticker1']
    ]
    vcs_pairs['proxy_ticker1'] = [x['ticker'] for x in proxy_output_list1]
    vcs_pairs['add_2_proxy1'] = [x['add_2_proxy'] for x in proxy_output_list1]

    proxy_output_list2 = [
        up.get_underlying_proxy_ticker(
            ticker=x,
            settle_date=report_date,
            futures_data_dictionary=futures_data_dictionary)
        for x in vcs_pairs['underlying_ticker2']
    ]
    vcs_pairs['proxy_ticker2'] = [x['ticker'] for x in proxy_output_list2]
    vcs_pairs['add_2_proxy2'] = [x['add_2_proxy'] for x in proxy_output_list2]

    vcs_pairs['expiration_date1'] = [
        int(
            exp.get_expiration_from_db(instrument='options', ticker=x,
                                       con=con).strftime('%Y%m%d'))
        for x in vcs_pairs['ticker1']
    ]
    vcs_pairs['expiration_date2'] = [
        int(
            exp.get_expiration_from_db(instrument='options', ticker=x,
                                       con=con).strftime('%Y%m%d'))
        for x in vcs_pairs['ticker2']
    ]

    vcs_pairs['interest_date1'] = [
        grfs.get_simple_rate(as_of_date=report_date, date_to=x)['rate_output']
        for x in vcs_pairs['expiration_date1']
    ]
    vcs_pairs['interest_date2'] = [
        grfs.get_simple_rate(as_of_date=report_date, date_to=x)['rate_output']
        for x in vcs_pairs['expiration_date2']
    ]
    vcs_pairs['exercise_type'] = [
        cmi.get_option_exercise_type(ticker_head=x)
        for x in vcs_pairs['tickerHead']
    ]

    admin_dir = dna.get_directory_name(ext='admin')
    risk_file_out = su.read_text_file(file_name=admin_dir +
                                      '/RiskParameter.txt')
    vcs_risk_parameter = 5 * 2 * float(risk_file_out[0])

    vcs_pairs['long_quantity'] = vcs_risk_parameter / abs(
        vcs_pairs['downside'])
    vcs_pairs['short_quantity'] = vcs_risk_parameter / vcs_pairs['upside']
    vcs_pairs['long_quantity'] = vcs_pairs['long_quantity'].round()
    vcs_pairs['short_quantity'] = vcs_pairs['short_quantity'].round()

    vcs_pairs['alias'] = [
        generate_vcs_alias(vcs_row=vcs_pairs.iloc[x])
        for x in range(len(vcs_pairs.index))
    ]

    vcs_pairs['call_mid_price1'] = np.nan
    vcs_pairs['put_mid_price1'] = np.nan
    vcs_pairs['call_mid_price2'] = np.nan
    vcs_pairs['put_mid_price2'] = np.nan
    vcs_pairs['call_iv1'] = np.nan
    vcs_pairs['put_iv1'] = np.nan
    vcs_pairs['call_iv2'] = np.nan
    vcs_pairs['put_iv2'] = np.nan
    vcs_pairs['underlying_mid_price1'] = np.nan
    vcs_pairs['underlying_mid_price2'] = np.nan
    vcs_pairs['proxy_mid_price1'] = np.nan
    vcs_pairs['proxy_mid_price2'] = np.nan
    vcs_pairs['current_strike1'] = np.nan
    vcs_pairs['current_strike2'] = np.nan

    ta_folder = dn.get_dated_directory_extension(folder_date=todays_date,
                                                 ext='ta')

    app.vcs_pairs = vcs_pairs
    app.con = con
    app.futures_data_dictionary = futures_data_dictionary
    app.report_date = report_date
    app.todays_date = todays_date
    app.log = lg.get_logger(file_identifier='vcs', log_level='INFO')
    app.trade_file = ta_folder + '/trade_dir.csv'
    app.vcs_risk_parameter = vcs_risk_parameter
    app.connect(client_id=3)
    app.run()
def get_book_snapshot(**kwargs):

    ticker = kwargs['ticker']
    utc_doubledate = kwargs['utc_doubledate']

    if 'freq_str' in kwargs.keys():
        freq_str = kwargs['freq_str']
    else:
        freq_str = '5T'

    contract_specs_output = cmi.get_contract_specs(ticker)
    ticker_head = contract_specs_output['ticker_head']

    output_dir = dn.get_dated_directory_extension(ext='book_snapshot_data', folder_date=utc_doubledate)
    file_name = output_dir + '/' + ticker + '_' + freq_str + '.pkl'

    if os.path.isfile(file_name):
        return pd.read_pickle(file_name)

    data_out = get_tick_data(**kwargs)

    if len(data_out.index)==0:
        return pd.DataFrame(columns=['mid_p','buy_volume','sell_volume','volume', 'hour_minute'])

    utc_year = m.floor(utc_doubledate / 10000)

    if utc_year in [2017, 2018]:
        p_multiplier = price_multiplier.get(ticker_head, 1)
    else:
        p_multiplier = price_multiplier_old.get(ticker_head, 1)


    if 'LocalTime' in data_out.columns:
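        # LocalTime is an integer encoded as HHMMSSmmm and LocalDate as
        # YYYYMMDD; the floor divisions below unpack them field by field.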

        data_out['hour'] = data_out['LocalTime'] / 1e7
        data_out['hour'] = data_out['hour'].apply(np.floor)

        data_out['minute'] = data_out['LocalTime'] / 1e5
        data_out['minute'] = data_out['minute'].apply(np.floor)
        data_out['minute'] = data_out['minute'] - 1e2 * data_out['hour']

        data_out['second'] = data_out['LocalTime'] / 1e3
        data_out['second'] = data_out['second'].apply(np.floor)
        data_out['second'] = data_out['second'] - 1e4 * data_out['hour']-1e2*data_out['minute']

        data_out['year'] = data_out['LocalDate'] / 1e4
        data_out['year'] = data_out['year'].apply(np.floor)

        data_out['month'] = data_out['LocalDate'] / 1e2
        data_out['month'] = data_out['month'].apply(np.floor)
        data_out['month'] = data_out['month'] - 1e2 * data_out['year']

        data_out['day'] = data_out['LocalDate'] - 1e4 * data_out['year'] - 1e2 * data_out['month']

        data_out['date_time'] = [dt.datetime(int(data_out['year'].iloc[x]), int(data_out['month'].iloc[x]), int(data_out['day'].iloc[x]),
                        int(data_out['hour'].iloc[x]), int(data_out['minute'].iloc[x]), int(data_out['second'].iloc[x])) for x in range(len(data_out.index))]

    else:

        data_out['hour_utc'] = data_out['Timestamp'] / 1e7
        data_out['hour_utc'] = data_out['hour_utc'].apply(np.floor)

        data_out['minute'] = data_out['Timestamp'] / 1e5
        data_out['minute'] = data_out['minute'].apply(np.floor)
        data_out['minute'] = data_out['minute'] - 1e2 * data_out['hour_utc']

        data_out['second'] = data_out['Timestamp'] / 1e3
        data_out['second'] = data_out['second'].apply(np.floor)
        data_out['second'] = data_out['second'] - 1e4 * data_out['hour_utc'] - 1e2 * data_out['minute']

        data_out['year'] = data_out['Date'] / 1e4
        data_out['year'] = data_out['year'].apply(np.floor)

        data_out['month'] = data_out['Date'] / 1e2
        data_out['month'] = data_out['month'].apply(np.floor)
        data_out['month'] = data_out['month'] - 1e2 * data_out['year']

        data_out['day'] = data_out['Date'] - 1e4 * data_out['year'] - 1e2 * data_out['month']

        data_out['date_time'] = [
            dt.datetime(int(data_out['year'].iloc[x]), int(data_out['month'].iloc[x]), int(data_out['day'].iloc[x]),
                        int(data_out['hour_utc'].iloc[x]), int(data_out['minute'].iloc[x]), int(data_out['second'].iloc[x]),
                        tzinfo=pytz.utc).astimezone(pytz.timezone('US/Central')).replace(tzinfo=None)
            for x in range(len(data_out.index))]

    merged_index = pd.date_range(start=data_out['date_time'].iloc[0].replace(second=0), end=data_out['date_time'].iloc[-1].replace(second=0), freq='S')
    data_out.set_index('date_time', inplace=True, drop=True)

    if 'Type' in data_out.columns:
        type_column_name = 'Type'
    elif 'EventType' in data_out.columns:
        type_column_name = 'EventType'
    else:
        raise ValueError('No event type column (Type/EventType) found in tick data')

    bid_data = data_out[data_out[type_column_name] == 'QUOTE BID']
    bid_data = bid_data.groupby(bid_data.index).last()
    bid_data = bid_data.reindex(merged_index, method='pad')

    ask_data = data_out[data_out[type_column_name] == 'QUOTE SELL']
    ask_data = ask_data.groupby(ask_data.index).last()
    ask_data = ask_data.reindex(merged_index, method='pad')

    # .copy() below avoids pandas chained-assignment warnings when adding
    # the CumQuantity column to each filtered slice.
    buy_trade_data = data_out[data_out[type_column_name] == 'TRADE AGRESSOR ON BUY'].copy()
    buy_trade_data['CumQuantity'] = buy_trade_data['Quantity'].cumsum()
    buy_trade_data = buy_trade_data.groupby(buy_trade_data.index).last()
    buy_trade_data = buy_trade_data.reindex(merged_index, method='pad')
    buy_trade_data['Quantity'] = buy_trade_data['CumQuantity'].diff()

    sell_trade_data = data_out[data_out[type_column_name] == 'TRADE AGRESSOR ON SELL'].copy()
    sell_trade_data['CumQuantity'] = sell_trade_data['Quantity'].cumsum()
    sell_trade_data = sell_trade_data.groupby(sell_trade_data.index).last()
    sell_trade_data = sell_trade_data.reindex(merged_index, method='pad')
    sell_trade_data['Quantity'] = sell_trade_data['CumQuantity'].diff()

    trade_data = data_out[data_out[type_column_name] == 'TRADE'].copy()
    trade_data['CumQuantity'] = trade_data['Quantity'].cumsum()
    trade_data = trade_data.groupby(trade_data.index).last()
    trade_data = trade_data.reindex(merged_index, method='pad')
    trade_data['Quantity'] = trade_data['CumQuantity'].diff()

    book_snapshot = pd.DataFrame(index=merged_index)

    book_snapshot['bid_p'] = bid_data['Price']
    book_snapshot['bid_p'] = book_snapshot['bid_p']*p_multiplier
    book_snapshot['bid_q'] = bid_data['Quantity']

    book_snapshot['ask_p'] = ask_data['Price']
    book_snapshot['ask_p'] = book_snapshot['ask_p'] * p_multiplier
    book_snapshot['ask_q'] = ask_data['Quantity']

    book_snapshot['buy_volume'] = buy_trade_data['Quantity']
    book_snapshot['sell_volume'] = sell_trade_data['Quantity']
    book_snapshot['volume'] = trade_data['Quantity']

    # mid_p is a size-weighted average of bid and ask, each side weighted by
    # its own displayed quantity (note this differs from the classic
    # microprice, which weights each price by the opposite side's size).
    book_snapshot['mid_p'] = (book_snapshot['bid_p'] * book_snapshot['bid_q'] + book_snapshot['ask_p'] * book_snapshot['ask_q']) / (
            book_snapshot['bid_q'] + book_snapshot['ask_q'])
    bar_data = book_snapshot['mid_p'].resample(freq_str).ohlc()
    bar_data['buy_volume'] = book_snapshot['buy_volume'].resample(freq_str).sum()
    bar_data['sell_volume'] = book_snapshot['sell_volume'].resample(freq_str).sum()
    bar_data['volume'] = book_snapshot['volume'].resample(freq_str).sum()

    bar_data['hour_minute'] = 100 * bar_data.index.hour + bar_data.index.minute

    bar_data.to_pickle(file_name)

    return bar_data
def generate_ocs_followup_report(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    broker = kwargs['broker']

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date,
                                                     ext='ta')

    con = msu.get_my_sql_connection(**kwargs)

    if 'writer' in kwargs.keys():
        writer = kwargs['writer']
    else:
        writer = pd.ExcelWriter(ta_output_dir + '/followup.xlsx',
                                engine='xlsxwriter')

    strategy_frame = ts.get_open_strategies(**kwargs)

    strategy_class_list = [
        sc.convert_from_string_to_dictionary(
            string_input=strategy_frame['description_string'][x])
        ['strategy_class'] for x in range(len(strategy_frame.index))
    ]

    ocs_indx = [x == 'ocs' for x in strategy_class_list]
    ocs_frame = strategy_frame[ocs_indx]

    if ocs_frame.empty:
        writer.save()
        return

    results = [
        sf.get_results_4strategy(alias=ocs_frame['alias'].iloc[x],
                                 strategy_info_output=ocs_frame.iloc[x],
                                 con=con,
                                 broker=broker,
                                 date_to=as_of_date)
        for x in range(len(ocs_frame.index))
    ]

    ocs_followup_frame = pd.DataFrame(results)
    ocs_followup_frame['alias'] = ocs_frame['alias'].values

    kwargs['name'] = 'final'
    pnl_frame = pm.get_daily_pnl_snapshot(**kwargs)
    merged_frame1 = pd.merge(ocs_followup_frame,
                             pnl_frame,
                             how='left',
                             on='alias')
    ocs_followup_frame = merged_frame1[[
        'alias', 'dollar_noise', 'time_held', 'daily_pnl', 'total_pnl', 'notes'
    ]]
    ocs_followup_frame.reset_index(drop=True, inplace=True)
    ocs_followup_frame.loc[max(ocs_followup_frame.index) + 1] = [
        'TOTAL', np.nan, np.nan, ocs_followup_frame['daily_pnl'].sum(),
        ocs_followup_frame['total_pnl'].sum(), ''
    ]

    date_from30 = cu.doubledate_shift(as_of_date, 30)
    history_frame = ts.select_strategies(close_date_from=date_from30,
                                         close_date_to=as_of_date,
                                         con=con)
    strategy_class_list = [
        sc.convert_from_string_to_dictionary(
            string_input=history_frame['description_string'][x])
        ['strategy_class'] for x in range(len(history_frame.index))
    ]

    ocs_indx = [x == 'ocs' for x in strategy_class_list]

    ocs_history_frame = history_frame[ocs_indx]
    pnl_past_month = ocs_history_frame['pnl'].sum()

    as_of_datetime = cu.convert_doubledate_2datetime(as_of_date)
    date_from7 = as_of_datetime + dt.timedelta(days=-7)
    ocs_short_history_frame = ocs_history_frame[
        ocs_history_frame['close_date'] >= date_from7]
    pnl_past_week = ocs_short_history_frame['pnl'].sum()

    ocs_followup_frame.loc[max(ocs_followup_frame.index) + 1] = [
        'WEEKLY PERFORMANCE', np.nan, np.nan, np.nan, pnl_past_week, ''
    ]
    ocs_followup_frame.loc[max(ocs_followup_frame.index) + 1] = [
        'MONTHLY PERFORMANCE', np.nan, np.nan, np.nan, pnl_past_month, ''
    ]

    ocs_followup_frame['total_pnl'] = ocs_followup_frame['total_pnl'].astype(int)

    ocs_followup_frame.to_excel(writer, sheet_name='ocs')
    worksheet_ocs = writer.sheets['ocs']
    worksheet_ocs.freeze_panes(1, 0)
    worksheet_ocs.set_column('B:B', 26)

    worksheet_ocs.autofilter(0, 0, len(ocs_followup_frame.index),
                             len(ocs_followup_frame.columns))

    if 'con' not in kwargs.keys():
        con.close()

    writer.save()
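generate_ocs_followup_report appends its TOTAL and performance rows by assigning to .loc[max(index) + 1], which grows the frame in place. A minimal illustration of that pandas idiom on a toy frame:

import pandas as pd

df = pd.DataFrame({'alias': ['a1', 'a2'], 'daily_pnl': [120.0, -40.0]})
df.loc[max(df.index) + 1] = ['TOTAL', df['daily_pnl'].sum()]
print(df)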
Example 36
def get_simple_rate(**kwargs):

    as_of_date = kwargs["as_of_date"]
    date_to = kwargs["date_to"]

    if "date_from" in kwargs.keys():
        date_from = kwargs["date_from"]
    else:
        date_from = as_of_date

    if "ticker_head" in kwargs.keys():
        ticker_head = kwargs["ticker_head"]
    else:
        ticker_head = "ED"

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext="ta")

    file_name = ta_output_dir + "/" + ticker_head + "_interest_curve.pkl"

    # print('as_of_date: ' + str(as_of_date) + ', date_to: ' + str(date_to))

    if os.path.isfile(file_name):
        price_frame = pd.read_pickle(file_name)

    if (not os.path.isfile(file_name)) or price_frame.empty:
        price_frame = gfp.get_futures_price_preloaded(ticker_head=ticker_head, settle_date=as_of_date)
        price_frame = price_frame[price_frame["close_price"].notnull()]

        price_frame.sort_values("tr_dte", ascending=True, inplace=True)
        price_frame["exp_date"] = [exp.get_futures_expiration(x) for x in price_frame["ticker"]]
        price_frame["implied_rate"] = 100 - price_frame["close_price"]
        price_frame.to_pickle(file_name)

    if price_frame.empty:
        return {
            "rate_output": np.NaN,
            "price_frame": pd.DataFrame(columns=["ticker", "cal_dte", "exp_date", "implied_rate"]),
        }

    datetime_to = cu.convert_doubledate_2datetime(date_to)
    datetime_from = cu.convert_doubledate_2datetime(date_from)

    price_frame_first = price_frame[price_frame["exp_date"] <= datetime_from]
    price_frame_middle = price_frame[
        (price_frame["exp_date"] > datetime_from) & (price_frame["exp_date"] < datetime_to)
    ]

    if price_frame_middle.empty:
        if not price_frame_first.empty:
            rate_output = price_frame_first["implied_rate"].iloc[-1] / 100
        else:
            rate_output = price_frame["implied_rate"].iloc[0] / 100
        return {
            "rate_output": rate_output,
            "price_frame": price_frame[["ticker", "cal_dte", "exp_date", "implied_rate"]],
        }

    if price_frame_first.empty:
        first_rate = price_frame_middle["implied_rate"].iloc[0]
    else:
        first_rate = price_frame_first["implied_rate"].iloc[-1]
    # the first stub runs from date_from to the first middle expiry in either case
    first_period = (price_frame_middle["exp_date"].iloc[0].to_pydatetime() - datetime_from).days

    last_rate = price_frame_middle["implied_rate"].iloc[-1]
    last_period = (datetime_to - price_frame_middle["exp_date"].iloc[-1].to_pydatetime()).days

    # discount factors for the full periods between consecutive middle expiries
    # (implied_rate is quoted in percent, hence the 36500 day-count denominator)
    middle_discount = [
        1
        + (
            price_frame_middle["implied_rate"].iloc[x]
            * (price_frame_middle["cal_dte"].iloc[x + 1] - price_frame_middle["cal_dte"].iloc[x])
            / 36500
        )
        for x in range(len(price_frame_middle.index) - 1)
    ]

    total_discount = (
        np.prod(np.array(middle_discount))
        * (1 + (first_rate * first_period / 36500))
        * (1 + (last_rate * last_period / 36500))
    )

    total_period = (
        (price_frame_middle["cal_dte"].iloc[-1] - price_frame_middle["cal_dte"].iloc[0]) + first_period + last_period
    )

    rate_output = (total_discount - 1) * 365 / total_period

    return {"rate_output": rate_output, "price_frame": price_frame[["ticker", "cal_dte", "exp_date", "implied_rate"]]}
Example 37
def get_historical_risk_4open_strategies(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date, ext='ta')

    if os.path.isfile(ta_output_dir + '/portfolio_risk.pkl'):
        with open(ta_output_dir + '/portfolio_risk.pkl','rb') as handle:
            portfolio_risk_output = pickle.load(handle)
        return portfolio_risk_output

    con = msu.get_my_sql_connection(**kwargs)

    strategy_frame = ts.get_open_strategies(**kwargs)
    futures_data_dictionary = {x: gfp.get_futures_price_preloaded(ticker_head=x) for x in cmi.ticker_class.keys()}

    strategy_risk_frame = pd.DataFrame()

    historical_risk_output = [get_historical_risk_4strategy(alias=x,
                                                            as_of_date=as_of_date,
                                                            con=con,
                                                            futures_data_dictionary=futures_data_dictionary)
                              for x in strategy_frame['alias']]
    if 'con' not in kwargs.keys():
        con.close()

    strategy_risk_frame['alias'] = strategy_frame['alias']
    strategy_risk_frame['downside'] = [x['downside'] for x in historical_risk_output]
    strategy_risk_frame.sort_values('downside', ascending=True, inplace=True)

    ticker_head_list = su.flatten_list([list(x['ticker_head_based_pnl_5_change'].keys()) for x in historical_risk_output if x['downside'] != 0])
    unique_ticker_head_list = list(set(ticker_head_list))

    ticker_head_aggregated_pnl_5_change = {ticker_head: sum([x['ticker_head_based_pnl_5_change'][ticker_head] for x in historical_risk_output
         if x['downside'] != 0 and ticker_head in x['ticker_head_based_pnl_5_change'].keys()]) for ticker_head in unique_ticker_head_list}

    percentile_vector = [stats.get_number_from_quantile(y=ticker_head_aggregated_pnl_5_change[ticker_head],
                                                        quantile_list=[1, 15],
                                                        clean_num_obs=max(100, round(3 * len(ticker_head_aggregated_pnl_5_change[ticker_head].values) / 4)))
                         for ticker_head in unique_ticker_head_list]

    ticker_head_risk_frame = pd.DataFrame()
    ticker_head_risk_frame['tickerHead'] = unique_ticker_head_list
    ticker_head_risk_frame['downside'] = [(x[0]+x[1])/2 for x in percentile_vector]

    ticker_head_risk_frame.sort_values('downside', ascending=True, inplace=True)

    strategy_risk_frame['downside'] = strategy_risk_frame['downside'].round()
    ticker_head_risk_frame['downside'] = ticker_head_risk_frame['downside'].round()

    portfolio_risk_output = {'strategy_risk_frame': strategy_risk_frame,
                             'ticker_head_aggregated_pnl_5_change': ticker_head_aggregated_pnl_5_change,
                             'ticker_head_risk_frame': ticker_head_risk_frame}

    with open(ta_output_dir + '/portfolio_risk.pkl', 'wb') as handle:
        pickle.dump(portfolio_risk_output, handle)

    return portfolio_risk_output
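The per-ticker-head downside above is the midpoint of the 1st and 15th percentiles of the aggregated 5-day PnL changes. A rough equivalent of that midpoint step, using np.percentile as a stand-in for the stats helper (which additionally enforces the clean_num_obs minimum-observation rule, not reproduced here):

import numpy as np

pnl_5_change = np.random.default_rng(0).normal(0, 1000, 500)  # hypothetical 5-day PnL changes
p1, p15 = np.percentile(pnl_5_change, [1, 15])
print(round((p1 + p15) / 2))  # downside estimate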
Example 38
def read_cme_settle_txt_files(**kwargs):

    file_name = kwargs['file_name']
    report_date = kwargs['report_date']

    options_data_dir = dn.get_dated_directory_extension(folder_date=report_date, ext='raw_options_data')

    if os.path.isfile(options_data_dir + '/' + file_name + '_formatted.pkl'):
        output_dictionary = pd.read_pickle(options_data_dir + '/' + file_name + '_formatted.pkl')
        return output_dictionary

    with open(options_data_dir + '/' + file_name + '.pkl', 'rb') as handle:
        raw_data = pickle.load(handle)

    title_list = []
    data_start_list = []
    data_end_list = []

    decoded_data = [x.decode('UTF-8') for x in raw_data]

    volume_start_indx = decoded_data[2].find('EST.VOL')-1
    interest_indx = decoded_data[2].find('INT')-5
    month_strike_indx = decoded_data[2].find('STRIKE')
    settle_indx = decoded_data[2].find('SETT')-4

    open_indx = decoded_data[2].find('OPEN')-4
    high_indx = decoded_data[2].find('HIGH')-4
    low_indx = decoded_data[2].find('LOW')-5

    for indx in range(len(decoded_data)):

        if any(x in decoded_data[indx] for x in ['OPTIONS', 'OPTION', 'Options', 'Option', 'FUTURE', 'Future',
                                                 'CSO', 'AIRASIA', 'SYNTH', 'BURSA', 'SPRD', 'Spread',
                                                 'CROSSRATE', 'WEEKLY', 'CALL', 'PUT','UTILITIES','CONSUMER','SP 500']):
            title_list.append(decoded_data[indx])

            if len(data_start_list) == 0:
                data_start_list.append(indx+1)
            else:
                data_start_list.append(indx+1)
                data_end_list.append(indx)

        if 'END OF REPORT' in decoded_data[indx]:
            data_end_list.append(indx)

    data_list = [decoded_data[data_start_list[x]:data_end_list[x]] for x in range(len(data_start_list))]

    volume_list = []
    volume_filtered_list = []

    interest_list = []
    interest_filtered_list = []

    settle_list = []
    open_list = []
    high_list = []
    low_list = []

    month_strike_list = []
    filter_1_list = []
    filter_2_list = []
    total_volume_list = []

    for i in range(len(data_list)):
        volume_column = [x[volume_start_indx:(volume_start_indx+10)] for x in data_list[i]]
        interest_column = [x[interest_indx:(interest_indx+10)] for x in data_list[i]]

        settle_column = [x[settle_indx:(settle_indx+9)] for x in data_list[i]]

        open_column = [x[open_indx:(open_indx+9)] for x in data_list[i]]
        high_column = [x[high_indx:(high_indx+9)] for x in data_list[i]]
        low_column = [x[low_indx:(low_indx+9)] for x in data_list[i]]

        month_strike_column = [x[month_strike_indx:(month_strike_indx+5)] for x in data_list[i]]
        month_strike_column_filtered = [x.strip() for x in month_strike_column if x != 'TOTAL']

        settle_column_filtered = [settle_column[x].strip() for x in range(len(settle_column)) if month_strike_column[x] != 'TOTAL']
        volume_column_filtered = [volume_column[x].strip() for x in range(len(volume_column)) if month_strike_column[x] != 'TOTAL']
        interest_column_filtered = [interest_column[x].strip() for x in range(len(interest_column)) if month_strike_column[x] != 'TOTAL']

        open_column_filtered = [open_column[x].strip() for x in range(len(open_column)) if month_strike_column[x] != 'TOTAL']
        high_column_filtered = [high_column[x].strip() for x in range(len(high_column)) if month_strike_column[x] != 'TOTAL']
        low_column_filtered = [low_column[x].strip() for x in range(len(low_column)) if month_strike_column[x] != 'TOTAL']

        filter_1 = ['TOTAL' not in x for x in month_strike_column]
        filter_1_list.append(filter_1)
        filter_2 = [bool((volume_column[x]).strip()) if filter_1[x] else False for x in range(len(volume_column))]
        filter_2_list.append(filter_2)
        volume_list.append(volume_column)
        volume_filtered_list.append(volume_column_filtered)

        interest_list.append(interest_column)
        interest_filtered_list.append(interest_column_filtered)

        settle_list.append(settle_column_filtered)
        open_list.append(open_column_filtered)
        high_list.append(high_column_filtered)
        low_list.append(low_column_filtered)
        month_strike_list.append(month_strike_column_filtered)

        total_volume_list.append((sum([int(volume_column[x]) if filter_2[x] else 0 for x in range(len(volume_column))])))

    title_frame = pd.DataFrame([process_title(x) for x in title_list])

    output_dictionary = {'decoded_data': decoded_data,
            'data_start_list': data_start_list,
            'data_end_list': data_end_list,
            'title_list': title_list,
            'data_list': data_list,
            'volume_list': volume_list,
            'volume_filtered_list': volume_filtered_list,
            'interest_list': interest_list,
            'interest_filtered_list': interest_filtered_list,
            'settle_list': settle_list,
            'open_list': open_list,
            'high_list': high_list,
            'low_list': low_list,
            'month_strike_list': month_strike_list,
            'filter_1_list': filter_1_list,
            'filter_2_list': filter_2_list,
            'total_volume_list': total_volume_list,
            'title_frame': title_frame}

    with open(options_data_dir + '/' + file_name + '_formatted.pkl', 'wb') as handle:
        pickle.dump(output_dictionary, handle)

    return output_dictionary
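The parser locates each column by searching the report's header line for a field label and then slicing every data row at the resulting fixed offset. A minimal, self-contained sketch of that header-offset technique (the header and row below are invented, not real CME output):

header = 'STRIKE      OPEN      HIGH       LOW      SETT   EST.VOL       INT'
row    = '  950     12.50     13.10     12.20     12.80      1500      3200'
settle_indx = header.find('SETT') - 4
print(row[settle_indx:settle_indx + 9].strip())  # -> '12.80'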
Example 39
import get_price.save_stock_data as ssd
import my_sql_routines.options_pnl_loader as opnl

commodity_address = 'ftp://ftp.cmegroup.com/pub/settle/stlags'
equity_address = 'ftp://ftp.cmegroup.com/pub/settle/stleqt'
fx_address = 'ftp://ftp.cmegroup.com/pub/settle/stlcur'
interest_rate_address = 'ftp://ftp.cmegroup.com/pub/settle/stlint'
comex_futures_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/comex_future.csv'
comex_options_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/comex_option.csv'
nymex_futures_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/nymex_future.csv'
nymex_options_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/nymex_option.csv'

#folder_date = cu.get_doubledate()
folder_date = exp.doubledate_shift_bus_days()

options_data_dir = dn.get_dated_directory_extension(folder_date=folder_date,
                                                    ext='raw_options_data')

commodity_output = sd.download_txt_from_web(web_address=commodity_address)
equity_output = sd.download_txt_from_web(web_address=equity_address)
fx_output = sd.download_txt_from_web(web_address=fx_address)
interest_rate_output = sd.download_txt_from_web(
    web_address=interest_rate_address)

comex_futures_output = sd.download_csv_from_web(
    web_address=comex_futures_csv_address)
comex_options_output = sd.download_csv_from_web(
    web_address=comex_options_csv_address)

nymex_futures_output = sd.download_csv_from_web(
    web_address=nymex_futures_csv_address)
nymex_options_output = sd.download_csv_from_web(
    web_address=nymex_options_csv_address)
Example 40
import formats.options_strategy_formats as osf
import formats.intraday_futures_strategy_formats as ifsf
import ta.prepare_daily as prep

commodity_address = 'ftp://ftp.cmegroup.com/pub/settle/stlags'
equity_address = 'ftp://ftp.cmegroup.com/pub/settle/stleqt'
fx_address = 'ftp://ftp.cmegroup.com/pub/settle/stlcur'
interest_rate_address = 'ftp://ftp.cmegroup.com/pub/settle/stlint'
comex_futures_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/comex_future.csv'
comex_options_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/comex_option.csv'
nymex_futures_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/nymex_future.csv'
nymex_options_csv_address = 'ftp://ftp.cmegroup.com/pub/settle/nymex_option.csv'

folder_date = cu.get_doubledate()

options_data_dir = dn.get_dated_directory_extension(folder_date=folder_date, ext='raw_options_data')

commodity_output = sd.download_txt_from_web(web_address=commodity_address)
equity_output = sd.download_txt_from_web(web_address=equity_address)
fx_output = sd.download_txt_from_web(web_address=fx_address)
interest_rate_output = sd.download_txt_from_web(web_address=interest_rate_address)

comex_futures_output = sd.download_csv_from_web(web_address=comex_futures_csv_address)
comex_options_output = sd.download_csv_from_web(web_address=comex_options_csv_address)

nymex_futures_output = sd.download_csv_from_web(web_address=nymex_futures_csv_address)
nymex_options_output = sd.download_csv_from_web(web_address=nymex_options_csv_address)

with open(options_data_dir + '/commodity.pkl', 'wb') as handle:
    pickle.dump(commodity_output, handle)
Example 41
def generate_futures_butterfly_followup_report(**kwargs):

    con = msu.get_my_sql_connection(**kwargs)

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    if 'writer' in kwargs.keys():
        writer = kwargs['writer']
    else:
        ta_output_dir = dn.get_dated_directory_extension(
            folder_date=as_of_date, ext='ta')
        writer = pd.ExcelWriter(ta_output_dir + '/followup.xlsx',
                                engine='xlsxwriter')

    strategy_frame = ts.get_open_strategies(**kwargs)

    strategy_class_list = [
        sc.convert_from_string_to_dictionary(
            string_input=strategy_frame['description_string'][x])['strategy_class']
        for x in range(len(strategy_frame.index))
    ]

    futures_butterfly_indx = [
        x == 'futures_butterfly' for x in strategy_class_list
    ]

    futures_butterfly_frame = strategy_frame[futures_butterfly_indx]

    results = [
        sf.get_results_4strategy(
            alias=futures_butterfly_frame['alias'].iloc[x],
            strategy_info_output=futures_butterfly_frame.iloc[x])
        for x in range(len(futures_butterfly_frame.index))
    ]

    butterfly_followup_frame = pd.DataFrame(results)
    butterfly_followup_frame['alias'] = futures_butterfly_frame['alias'].values

    pnl_frame = pm.get_daily_pnl_snapshot(as_of_date=as_of_date,
                                          con=con,
                                          name='final')
    risk_output = hr.get_historical_risk_4open_strategies(
        as_of_date=as_of_date, con=con)

    merged_frame1 = pd.merge(butterfly_followup_frame,
                             pnl_frame,
                             how='left',
                             on='alias')
    merged_frame2 = pd.merge(merged_frame1,
                             risk_output['strategy_risk_frame'],
                             how='left',
                             on='alias')

    butterfly_followup_frame = merged_frame2[[
        'alias', 'ticker_head', 'holding_tr_dte', 'short_tr_dte', 'z1_initial',
        'z1', 'QF_initial', 'QF', 'total_pnl', 'downside', 'recommendation'
    ]]

    butterfly_followup_frame.rename(columns={
        'alias': 'Alias',
        'ticker_head': 'TickerHead',
        'holding_tr_dte': 'HoldingTrDte',
        'short_tr_dte': 'ShortTrDte',
        'z1_initial': 'Z1Initial',
        'z1': 'Z1',
        'QF_initial': 'QFInitial',
        'total_pnl': 'TotalPnl',
        'downside': 'Downside',
        'recommendation': 'Recommendation'
    },
                                    inplace=True)

    butterfly_followup_frame.sort_values('QF', ascending=False, inplace=True)

    butterfly_followup_frame['Z1'] = butterfly_followup_frame['Z1'].round(2)

    butterfly_followup_frame.to_excel(writer, sheet_name='butterflies')
    worksheet_butterflies = writer.sheets['butterflies']

    worksheet_butterflies.set_column('B:B', 26)
    worksheet_butterflies.freeze_panes(1, 0)

    worksheet_butterflies.autofilter(0, 0, len(butterfly_followup_frame.index),
                                     len(butterfly_followup_frame.columns))

    if 'con' not in kwargs.keys():
        con.close()

    return writer
Example 42
def get_book_snapshot_4ticker(**kwargs):

    if 'folder_date' in kwargs.keys():
        folder_date = kwargs['folder_date']
    else:
        folder_date = exp.doubledate_shift_bus_days()

    if 'freq_str' in kwargs.keys():
        freq_str = kwargs['freq_str']
    else:
        freq_str = 'T'

    ticker = kwargs['ticker']

    data_dir = dn.get_dated_directory_extension(ext='intraday_ttapi_data_fixed_interval', folder_date=folder_date)
    file_name = data_dir + '/' + ticker + '_' + freq_str + '.pkl'

    if os.path.isfile(file_name):
        book_snapshot = pd.read_pickle(file_name)
        return book_snapshot

    data_frame_out = load_csv_file_4ticker(**kwargs)

    if data_frame_out.empty:
        return pd.DataFrame(columns=['best_bid_p','best_bid_q','best_ask_p','best_ask_q'])

    start_datetime = dt.datetime.utcfromtimestamp(data_frame_out['time'].values[0].tolist()/1e9).replace(microsecond=0, second=0)
    end_datetime = dt.datetime.utcfromtimestamp(data_frame_out['time'].values[-1].tolist()/1e9).replace(microsecond=0, second=0)

    merged_index = pd.date_range(start=start_datetime,end=end_datetime,freq=freq_str)

    data_frame_out.set_index('time', inplace=True, drop=True)
    best_bid_p = data_frame_out[data_frame_out['field'] == 'BestBidPrice']
    best_bid_p = best_bid_p.groupby(best_bid_p.index).last()
    best_bid_p = best_bid_p.reindex(merged_index,method='pad')

    best_bid_q = data_frame_out[data_frame_out['field'] == 'BestBidQuantity']
    best_bid_q = best_bid_q.groupby(best_bid_q.index).last()
    best_bid_q = best_bid_q.reindex(merged_index,method='pad')

    best_ask_p = data_frame_out[data_frame_out['field'] == 'BestAskPrice']
    best_ask_p = best_ask_p.groupby(best_ask_p.index).last()
    best_ask_p = best_ask_p.reindex(merged_index,method='pad')

    best_ask_q = data_frame_out[data_frame_out['field'] == 'BestAskQuantity']
    best_ask_q = best_ask_q.groupby(best_ask_q.index).last()
    best_ask_q = best_ask_q.reindex(merged_index,method='pad')

    book_snapshot = pd.DataFrame(index=merged_index)

    book_snapshot['best_bid_p'] = best_bid_p['value'].astype('float64')
    book_snapshot['best_bid_q'] = best_bid_q['value']
    book_snapshot['best_ask_p'] = best_ask_p['value'].astype('float64')
    book_snapshot['best_ask_q'] = best_ask_q['value']

    ticker_head = cmi.get_contract_specs(kwargs['ticker'])['ticker_head']

    book_snapshot['best_bid_p'] = [tfl.convert_trade_price_from_tt(price=x,ticker_head=ticker_head) for x in book_snapshot['best_bid_p']]
    book_snapshot['best_ask_p'] = [tfl.convert_trade_price_from_tt(price=x,ticker_head=ticker_head) for x in book_snapshot['best_ask_p']]

    book_snapshot.to_pickle(file_name)
    return book_snapshot
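A minimal usage sketch for the cached book-snapshot loader, assuming a hypothetical ticker and a folder date in the doubledate format used throughout these examples:

book_snapshot = get_book_snapshot_4ticker(ticker='CLZ5', folder_date=20160104, freq_str='T')
print(book_snapshot[['best_bid_p', 'best_ask_p']].tail())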
Example 43
def read_cme_settle_txt_files(**kwargs):

    file_name = kwargs['file_name']
    report_date = kwargs['report_date']

    options_data_dir = dn.get_dated_directory_extension(folder_date=report_date, ext='raw_options_data')
    if os.path.isfile(options_data_dir + '/' + file_name + '_formatted.pkl'):
        output_dictionary = pd.read_pickle(options_data_dir + '/' + file_name + '_formatted.pkl')
        return output_dictionary

    with open(options_data_dir + '/' + file_name + '.pkl', 'rb') as handle:
        raw_data = pickle.load(handle)

    title_list = []
    data_start_list = []
    data_end_list = []

    decoded_data = [x.decode('UTF-8') for x in raw_data]

    volume_start_indx = decoded_data[2].find('EST.VOL')-1
    interest_indx = decoded_data[2].find('INT')-5
    month_strike_indx = decoded_data[2].find('STRIKE')
    settle_indx = decoded_data[2].find('SETT')-4

    open_indx = decoded_data[2].find('OPEN')-4
    high_indx = decoded_data[2].find('HIGH')-4
    low_indx = decoded_data[2].find('LOW')-5

    for indx in range(len(decoded_data)):

        if any(x in decoded_data[indx] for x in ['OPTIONS', 'OPTION', 'Options', 'Option', 'FUTURE', 'Future',
                                                 'CSO', 'AIRASIA', 'SYNTH', 'BURSA', 'SPRD', 'Spread',
                                                 'CROSSRATE', 'WEEKLY', 'CALL', 'PUT','UTILITIES','CONSUMER','SP 500','sector','SP500',
                                                 'NQ', 'S&P','Black','Sea','WHEAT','Wheat','Financially','Settled']):
            title_list.append(decoded_data[indx])

            if len(data_start_list) == 0:
                data_start_list.append(indx+1)
            else:
                data_start_list.append(indx+1)
                data_end_list.append(indx)

        if 'END OF REPORT' in decoded_data[indx]:
            data_end_list.append(indx)

    data_list = [decoded_data[data_start_list[x]:data_end_list[x]] for x in range(len(data_start_list))]

    volume_list = []
    volume_filtered_list = []

    interest_list = []
    interest_filtered_list = []

    settle_list = []
    open_list = []
    high_list = []
    low_list = []

    month_strike_list = []
    filter_1_list = []
    filter_2_list = []
    total_volume_list = []

    for i in range(len(data_list)):
        volume_column = [x[volume_start_indx:(volume_start_indx+10)] for x in data_list[i]]
        interest_column = [x[interest_indx:(interest_indx+10)] for x in data_list[i]]

        settle_column = [x[settle_indx:(settle_indx+9)] for x in data_list[i]]

        open_column = [x[open_indx:(open_indx+9)] for x in data_list[i]]
        high_column = [x[high_indx:(high_indx+9)] for x in data_list[i]]
        low_column = [x[low_indx:(low_indx+9)] for x in data_list[i]]

        month_strike_column = [x[month_strike_indx:(month_strike_indx+5)] for x in data_list[i]]
        month_strike_column_filtered = [x.strip() for x in month_strike_column if x != 'TOTAL']

        settle_column_filtered = [settle_column[x].strip() for x in range(len(settle_column)) if month_strike_column[x] != 'TOTAL']
        volume_column_filtered = [volume_column[x].strip() for x in range(len(volume_column)) if month_strike_column[x] != 'TOTAL']
        interest_column_filtered = [interest_column[x].strip() for x in range(len(interest_column)) if month_strike_column[x] != 'TOTAL']

        open_column_filtered = [open_column[x].strip() for x in range(len(open_column)) if month_strike_column[x] != 'TOTAL']
        high_column_filtered = [high_column[x].strip() for x in range(len(high_column)) if month_strike_column[x] != 'TOTAL']
        low_column_filtered = [low_column[x].strip() for x in range(len(low_column)) if month_strike_column[x] != 'TOTAL']

        filter_1 = ['TOTAL' not in x for x in month_strike_column]
        filter_1_list.append(filter_1)
        filter_2 = [bool((volume_column[x]).strip()) if filter_1[x] else False for x in range(len(volume_column))]
        filter_2_list.append(filter_2)
        volume_list.append(volume_column)
        volume_filtered_list.append(volume_column_filtered)

        interest_list.append(interest_column)
        interest_filtered_list.append(interest_column_filtered)

        settle_list.append(settle_column_filtered)
        open_list.append(open_column_filtered)
        high_list.append(high_column_filtered)
        low_list.append(low_column_filtered)
        month_strike_list.append(month_strike_column_filtered)

        total_volume_list.append((sum([int(volume_column[x]) if filter_2[x] else 0 for x in range(len(volume_column))])))
    title_frame = pd.DataFrame([process_title(x) for x in title_list])

    output_dictionary = {'decoded_data': decoded_data,
            'data_start_list': data_start_list,
            'data_end_list': data_end_list,
            'title_list': title_list,
            'data_list': data_list,
            'volume_list': volume_list,
            'volume_filtered_list': volume_filtered_list,
            'interest_list': interest_list,
            'interest_filtered_list': interest_filtered_list,
            'settle_list': settle_list,
            'open_list': open_list,
            'high_list': high_list,
            'low_list': low_list,
            'month_strike_list': month_strike_list,
            'filter_1_list': filter_1_list,
            'filter_2_list': filter_2_list,
            'total_volume_list': total_volume_list,
            'title_frame': title_frame}

    with open(options_data_dir + '/' + file_name + '_formatted.pkl', 'wb') as handle:
        pickle.dump(output_dictionary, handle)

    return output_dictionary
Example 44
def generate_vcs_followup_report(**kwargs):

    if 'as_of_date' in kwargs.keys():
        as_of_date = kwargs['as_of_date']
    else:
        as_of_date = exp.doubledate_shift_bus_days()
        kwargs['as_of_date'] = as_of_date

    ta_output_dir = dn.get_dated_directory_extension(folder_date=as_of_date,
                                                     ext='ta')

    con = msu.get_my_sql_connection(**kwargs)

    if 'writer' in kwargs.keys():
        writer = kwargs['writer']
    else:
        writer = pd.ExcelWriter(ta_output_dir + '/followup.xlsx',
                                engine='xlsxwriter')

    strategy_frame = ts.get_open_strategies(**kwargs)

    strategy_class_list = [
        sc.convert_from_string_to_dictionary(
            string_input=strategy_frame['description_string'][x])['strategy_class']
        for x in range(len(strategy_frame.index))
    ]

    vcs_indx = [x == 'vcs' for x in strategy_class_list]
    vcs_frame = strategy_frame[vcs_indx]

    if len(vcs_frame.index) == 0:
        return writer

    results = [
        sf.get_results_4strategy(alias=vcs_frame['alias'].iloc[x],
                                 strategy_info_output=vcs_frame.iloc[x])
        for x in range(len(vcs_frame.index))
    ]

    vcs_followup_frame = pd.DataFrame(results)
    vcs_followup_frame['alias'] = vcs_frame['alias'].values

    kwargs['name'] = 'final'
    pnl_frame = pm.get_daily_pnl_snapshot(**kwargs)
    merged_frame1 = pd.merge(vcs_followup_frame,
                             pnl_frame,
                             how='left',
                             on='alias')

    vcs_followup_frame = merged_frame1[[
        'alias', 'last_adjustment_days_ago', 'min_tr_dte', 'long_short_ratio',
        'net_oev', 'net_theta', 'long_oev', 'short_oev', 'favQMove',
        'total_pnl', 'recommendation'
    ]]

    vcs_followup_frame['long_short_ratio'] = vcs_followup_frame['long_short_ratio'].round()
    vcs_followup_frame['net_oev'] = vcs_followup_frame['net_oev'].round(1)
    vcs_followup_frame['long_oev'] = vcs_followup_frame['long_oev'].round(1)
    vcs_followup_frame['short_oev'] = vcs_followup_frame['short_oev'].round(1)
    vcs_followup_frame['net_theta'] = vcs_followup_frame['net_theta'].round(1)

    vcs_followup_frame.sort_values('total_pnl', ascending=False, inplace=True)
    vcs_followup_frame.reset_index(drop=True, inplace=True)
    vcs_followup_frame.loc[len(vcs_followup_frame.index)] = [
        'TOTAL', None, None, None, None, vcs_followup_frame['net_theta'].sum(),
        None, None, None, vcs_followup_frame['total_pnl'].sum(), None
    ]

    vcs_followup_frame.to_excel(writer, sheet_name='vcs')
    worksheet_vcs = writer.sheets['vcs']
    worksheet_vcs.set_column('B:B', 18)
    worksheet_vcs.freeze_panes(1, 0)

    worksheet_vcs.autofilter(0, 0, len(vcs_followup_frame.index),
                             len(vcs_followup_frame.columns))

    if 'con' not in kwargs.keys():
        con.close()

    return writer
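Because each followup generator accepts an optional writer keyword and returns the writer, the reports can share one workbook. A sketch of that chaining pattern, assuming a hypothetical as_of_date:

writer = generate_futures_butterfly_followup_report(as_of_date=20160104)
writer = generate_vcs_followup_report(as_of_date=20160104, writer=writer)
writer.save()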