def cacluate_needed_data(symb1, symb2, csv_file_full_path):
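    """Build or incrementally update the calculated CSV for one symbol pair.

    A fresh spread/z-score frame is built from the raw input data.  If
    ``csv_file_full_path`` already exists, its rows are merged in with
    ``combine_first`` and ``set_corr_and_coint`` is given the last stored date,
    presumably so correlation/cointegration is only recomputed for newer rows.
    ``pair_trade``, ``pair_back_test``, ``ft`` and ``setting`` are assumed to
    be project-local helper modules.
    """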

    _pairs = pair_trade.create_pairs_dataframe(setting.get_input_data_dir(),
                                               symb1, symb2)
    _pairs = _pairs.sort_values('DATE', ascending=True)
    _pairs = pair_trade.calculate_spread_zscore(_pairs, symb1, symb2)

    if ft.is_file_exists(csv_file_full_path):
        csv_pairs = ft.read_csv(csv_file_full_path)
        csv_pairs['DATE'] = pd.to_datetime(csv_pairs['DATE'])
        csv_pairs = csv_pairs.sort_values('DATE', ascending=True)
        csv_pairs.index = csv_pairs['DATE']

        last_row_date = csv_pairs.tail(1).index
        # print ('last_row_date {0}'.format(last_row_date))

        _pairs = _pairs.combine_first(csv_pairs)
        _pairs = _pairs.loc[:, [
            'OPEN_' + symb1, 'CLOSE_' + symb1, 'OPEN_' + symb2, 'CLOSE_' +
            symb2, 'saya_divide', 'saya_divide_mean', 'saya_divide_std',
            'saya_divide_sigma', 'deviation_rate(%)', 'CORR_3M', 'COINT_3M',
            'CORR_1Y', 'COINT_1Y'
        ]]

        _pairs = _pairs.sort_values('DATE', ascending=False)
        pair_back_test.set_corr_and_coint(_pairs, symb1, symb2, last_row_date)
        ft.write_csv(_pairs, csv_file_full_path)

    else:
        _pairs = _pairs.sort_values('DATE', ascending=False)
        pair_back_test.set_corr_and_coint(_pairs, symb1, symb2)
        ft.write_csv(_pairs, csv_file_full_path)
def main(targetYear=None, targetMonth=None):
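    """Recalculate spread/z-score CSVs for every pair on the watching-list sheet.

    Symbol pairs are read from the Excel sheet (columns 3 and 7, starting at
    row 4), each pair's calculated data is written to the watching-list
    directory, and a correlation/cointegration summary report is produced at
    the end.  ``targetYear`` and ``targetMonth`` are currently unused.
    """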
    print('Watching List data calculate main start!')
    file_name = os.path.join(setting.get_root_dir(), excel_file_name)
    workbook = openpyxl.load_workbook(file_name, data_only=True)
    sheet = workbook[sheet_name_Watching_Input]
    record_list = []
    symbols_corr_list = []

    ft.clean_target_dir(setting.get_watching_list_file_dir())
    for i in range(4, sheet.max_row + 1):

        record = WatchingRecord()
        code1 = sheet.cell(row=i, column=3).value
        code2 = sheet.cell(row=i, column=7).value
        # Skip rows where either symbol cell is empty.
        if code1 is None or code2 is None:
            continue
        record.code1 = str(code1)
        record.code2 = str(code2)
        record_list.append(record)

    for record in record_list:
        symb1 = record.code1
        symb2 = record.code2

        if (symb1 is None or symb2 is None or len(symb1) <= 0
                or len(symb2) <= 0 or symb1 == "None" or symb2 == "None"):
            continue

        _pairs = pairs_main.create_pairs_dataframe(
            setting.get_input_data_dir(), symb1, symb2)
        corr_3m, corr_1y = trade_util.check_corr(_pairs, symb1, symb2)
        coint_3m, coint_1y = trade_util.check_cointegration(
            _pairs, symb1, symb2)
        symbols_corr_list.append(
            [symb1, symb2, corr_3m, corr_1y, coint_3m, coint_1y])
        _pairs = _pairs.sort_values('DATE', ascending=True)
        _pairs = pairs_main.calculate_spread_zscore(_pairs, symb1, symb2)
        _pairs = _pairs.sort_values('DATE', ascending=False)
        file_util.write_csv(
            _pairs,
            os.path.join(setting.get_watching_list_file_dir(),
                         symb1 + '_' + symb2 + '.csv'))

    corr_data = sorted(symbols_corr_list, key=itemgetter(3),
                       reverse=True)  # sort by 1-year correlation (index 3)
    corr_data = pd.DataFrame(columns=[
        'SYM_A', 'SYM_B', 'CORR_3M', 'CORR_1Y', 'COINT_3M', 'COINT_1Y'
    ],
                             data=corr_data)
    # file_util.write_csv(corr_data, os.path.join(setting.get_result_dir(), corr_result_file_name))

    pairs_main.output_report(corr_data, False,
                             setting.get_watching_list_file_dir(),
                             setting.watching_corr_result_file_name)

    print('Watching List data calculate main end!')
def generate_day_report(target_date, file_name_list):
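    """Collect one row per pair for ``target_date`` into a daily report CSV.

    Each ``<symb1>_<symb2>`` CSV in ``caculated_csv_dir`` is read, the row for
    the target date is renamed to generic A/B columns, and pairs that fail the
    correlation or cointegration thresholds are skipped.  No file is written
    when nothing qualifies for the date.
    """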

    day_report_file_name = os.path.join(
        backtest_report_each_day_dir,
        target_date.strftime("%Y-%m-%d") + '.csv')

    # temp_data = ft.read_csv(os.path.join(caculated_csv_dir, file_name_list[0] + '.csv'))
    # day_report_data = pd.DataFrame([], columns=temp_data.columns)
    day_report_data = pd.DataFrame()

    for file_name in file_name_list:

        _temp = file_name.split('_')
        symb1 = _temp[0]
        symb2 = _temp[1]

        pairs_data = ft.read_csv(
            os.path.join(caculated_csv_dir, file_name + '.csv'))
        pairs_data['DATE'] = pd.to_datetime(pairs_data['DATE'])
        pairs_data.index = pairs_data['DATE']
        search_data = pairs_data[target_date:target_date]
        if search_data.empty:
            # print('empty {0}'.format(target_date))
            # No row for this date in this pair's CSV; skip this pair rather
            # than aborting the whole day report.
            continue

        search_data1 = search_data.copy(deep=True)
        search_data1.rename(columns={
            'OPEN_' + symb1: 'OPEN_A',
            'CLOSE_' + symb1: 'CLOSE_A',
            'OPEN_' + symb2: 'OPEN_B',
            'CLOSE_' + symb2: 'CLOSE_B'
        },
                            inplace=True)

        search_data1['SYM_A'] = symb1
        search_data1['SYM_B'] = symb2

        search_data1 = search_data1.loc[:, [
            'SYM_A', 'OPEN_A', 'CLOSE_A', 'SYM_B', 'OPEN_B', 'CLOSE_B',
            'saya_divide', 'saya_divide_mean', 'saya_divide_std',
            'saya_divide_sigma', 'deviation_rate(%)', 'CORR_3M', 'COINT_3M',
            'CORR_1Y', 'COINT_1Y'
        ]]

        if search_data1.at[target_date, 'CORR_3M'] < CORR_THRE_SHOLD_THREE_MONTH \
                or search_data1.at[target_date, 'CORR_1Y'] < CORR_THRE_SHOLD_ONE_YEAR:
            continue

        if search_data1.at[target_date, 'COINT_3M'] > COINT_MAX_VAL \
                or search_data1.at[target_date, 'COINT_1Y'] > COINT_MAX_VAL:
            continue

        # DataFrame.append was removed in pandas 2.x; concat is the equivalent.
        day_report_data = pd.concat([day_report_data, search_data1])

    if not day_report_data.empty:
        ft.write_csv(day_report_data, day_report_file_name)
def generate_caculated_data_csv(symbols,
                                caculated_csv_path=caculated_csv_dir,
                                startdate=None,
                                enddate=None,
                                mode='insert'):
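    """Generate the per-pair calculated CSVs, creating or updating them.

    Every unordered pair of ``symbols`` is processed once (duplicates are
    filtered through ``symbol_check_dict``).  With ``mode='create'`` the output
    directory is wiped first; otherwise existing CSVs are merged with the
    freshly calculated frame via ``combine_first`` so correlation and
    cointegration presumably only need recomputing from the last stored date.
    ``startdate``/``enddate`` optionally restrict the raw data range.
    """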
    symbol_check_dict = {}

    if (mode == 'create'):
        ft.clean_target_dir(caculated_csv_path)
    else:
        ft.create_target_dir(caculated_csv_path)

    index1 = 0
    for symb1 in symbols:
        index1 = index1 + 1
        print('Processing {0}/{1} {2}...'.format(index1, len(symbols), symb1))
        for symb2 in symbols:

            if (symb1 == symb2 or (symb1 + symb2) in symbol_check_dict
                    or (symb2 + symb1) in symbol_check_dict):
                continue
            symbol_check_dict[symb1 + symb2] = ''

            _pairs = pairs_util.create_pairs_dataframe(
                setting.get_input_data_dir(), symb1, symb2)

            if startdate is not None:
                start_date = datetime.datetime.strftime(startdate, '%Y-%m-%d')
                _pairs = _pairs[_pairs.index >= start_date]
            if enddate is not None:
                end_date = datetime.datetime.strftime(enddate, '%Y-%m-%d')
                _pairs = _pairs[_pairs.index <= end_date]
            #_pairs = _pairs[(_pairs.index >= startdate) & (_pairs.index <= enddate)]

            result_write_csv = os.path.join(caculated_csv_path,
                                            symb1 + '_' + symb2 + '.csv')
            _pairs = _pairs.sort_values('DATE', ascending=True)
            _pairs = pairs_util.calculate_spread_zscore(_pairs, symb1, symb2)

            if ft.is_file_exists(result_write_csv):
                csv_pairs = ft.read_csv(result_write_csv)
                csv_pairs['DATE'] = pd.to_datetime(csv_pairs['DATE'])
                csv_pairs = csv_pairs.sort_values('DATE', ascending=True)
                csv_pairs.index = csv_pairs['DATE']

                last_row_date = csv_pairs.tail(1).index
                # print ('last_row_date {0}'.format(last_row_date))

                _pairs = _pairs.combine_first(csv_pairs)

                _pairs = _pairs.loc[:, [
                    'OPEN_' + symb1, 'CLOSE_' + symb1, 'OPEN_' +
                    symb2, 'CLOSE_' + symb2, 'saya_divide', 'saya_divide_mean',
                    'saya_divide_std', 'saya_divide_sigma',
                    'deviation_rate(%)', 'CORR_3M', 'COINT_3M', 'CORR_1Y',
                    'COINT_1Y'
                ]]

                _pairs = _pairs.sort_values('DATE', ascending=False)
                set_corr_and_coint(_pairs, symb1, symb2, last_row_date)
                ft.write_csv(_pairs, result_write_csv)

            else:
                _pairs = _pairs.sort_values('DATE', ascending=False)
                set_corr_and_coint(_pairs, symb1, symb2)
                ft.write_csv(_pairs, result_write_csv)
Example #5
def output_report(corr_df, isFastCaculateMode, resultDir,
                  corr_result_file_name):
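    """Build the consolidated report CSV from the per-pair result files.

    For every row of ``corr_df`` the pair's CSV under ``resultDir`` is read,
    the latest prices, sigma and lot sizes are extracted, and
    ``signal_generate`` back tests the pair.  Missing pair files are padded
    with zeros so all columns stay aligned.  The report is written to
    ``corr_result_file_name`` in ``resultDir`` and, outside fast mode, also to
    the master directory.
    """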

    print('Output Report Processing...')
    timestr = time.strftime("%Y%m%d-%H%M%S")
    report_file = os.path.join(resultDir, 'report_' + timestr + '.xlsx')

    # corr_df = file_util.read_csv(os.path.join(setting.get_result_dir(), corr_result_file_name))
    master_df = file_util.read_csv(setting.get_master_file_dir())
    corr_df = trade_util.addMasterInfo(corr_df, master_df)

    corr_df_new = corr_df.copy(deep=True)

    OPEN_A_list = []
    CLOSE_A_list = []
    OPEN_B_list = []
    CLOSE_B_list = []
    SIGMA = []
    ABS_SIGMA = []
    LAST_DAY_SIGMA = []
    TRADE_A = []
    TRADE_B = []
    DEV_RATE = []
    AXIS_LOT_SIZE = []
    PAIR_LOT_SIZE = []
    LOT_SIZE_DIFF = []

    total_profit_list = []
    average_profit_list = []
    average_plt_list = []
    total_times_list = []
    plus_times_list = []
    minus_times_list = []
    pl_times_list = []
    open_days_list = []
    stop_profit_times_list = []
    stop_loss_times_list = []
    max_day_over_times_list = []

    DATE_3M_ago_list = []
    CLOSE_A_3M_ago_list = []
    CLOSE_B_3M_ago_list = []

    DATE_6M_ago_list = []
    CLOSE_A_6M_ago_list = []
    CLOSE_B_6M_ago_list = []

    DATE_1y_ago_list = []
    CLOSE_A_1y_ago_list = []
    CLOSE_B_1y_ago_list = []

    DATE_2y_ago_list = []
    CLOSE_A_2y_ago_list = []
    CLOSE_B_2y_ago_list = []

    index1 = 0
    for index, row in corr_df.iterrows():
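        # One pass per pair: read its calculated CSV, pick up the latest
        # prices/sigma/lot sizes, and run the back test; results are collected
        # into the parallel lists declared above.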
        # print('row.SYM_A:'+str(int(row.SYM_A)))
        symblA = str(int(row.SYM_A))
        symblB = str(int(row.SYM_B))
        # print('symblA=%s symblB=%s' % (symblA,symblB))
        index1 = index1 + 1
        print('Processing {0}/{1} {2} - {3}...'.format(index1, len(corr_df),
                                                       symblA, symblB))

        try:
            _file = os.path.join(resultDir, symblA + '_' + symblB + '.csv')
            _df = file_util.read_csv(_file)

            OPEN_A_list.append(_df['OPEN_' + symblA][0])
            CLOSE_A_list.append(_df['CLOSE_' + symblA][0])
            OPEN_B_list.append(_df['OPEN_' + symblB][0])
            CLOSE_B_list.append(_df['CLOSE_' + symblB][0])
            SIGMA.append(_df['saya_divide_sigma'][0])
            ABS_SIGMA.append(np.abs(_df['saya_divide_sigma'][0]))
            LAST_DAY_SIGMA.append(np.abs(_df['saya_divide_sigma'][1]))

            if (_df['saya_divide_sigma'][0] > 0):
                TRADE_A.append("SELL")
                TRADE_B.append("BUY")
            else:
                TRADE_A.append("BUY")
                TRADE_B.append("SELL")

            DEV_RATE.append(_df['deviation_rate(%)'][0])

            axis_lot_size, pair_lot_size, lot_size_diff = trade_util.get_lot_size(
                _df['CLOSE_' + symblA][0], _df['CLOSE_' + symblB][0])
            # print(axis_lot_size)
            AXIS_LOT_SIZE.append(axis_lot_size)
            PAIR_LOT_SIZE.append(pair_lot_size)
            LOT_SIZE_DIFF.append(lot_size_diff)

            # print(_df)
            total_profit, average_profit, average_pl, total_times, plus_times, minus_times, open_days, stop_profit_times, stop_loss_times, \
            max_day_over_times = signal_generate(_df, symblA, symblB, resultDir)

            total_profit_list.append(total_profit)
            average_profit_list.append(average_profit)
            average_plt_list.append(average_pl)
            total_times_list.append(total_times)
            plus_times_list.append(plus_times)
            minus_times_list.append(minus_times)
            if plus_times <= 0 or total_times <= 0:
                pl_times_list.append(0)
            else:
                pl_times_list.append(round(plus_times / total_times * 100, 2))
            open_days_list.append(open_days)

            stop_profit_times_list.append(stop_profit_times)
            stop_loss_times_list.append(stop_loss_times)
            max_day_over_times_list.append(max_day_over_times)

            date_3m_ago, CLOSE_A_3M_ago, CLOSE_B_3M_ago, date_6m_ago, CLOSE_A_6M_ago, CLOSE_B_6M_ago, date_1y_ago, \
            CLOSE_A_1Y_ago, CLOSE_B_1Y_ago, date_2y_ago, CLOSE_A_2Y_ago, CLOSE_B_2Y_ago = trade_util.get_before_close_price_data(_df, symblA, symblB)

            DATE_3M_ago_list.append(date_3m_ago)
            CLOSE_A_3M_ago_list.append(CLOSE_A_3M_ago)
            CLOSE_B_3M_ago_list.append(CLOSE_B_3M_ago)

            DATE_6M_ago_list.append(date_6m_ago)
            CLOSE_A_6M_ago_list.append(CLOSE_A_6M_ago)
            CLOSE_B_6M_ago_list.append(CLOSE_B_6M_ago)

            DATE_1y_ago_list.append(date_1y_ago)
            CLOSE_A_1y_ago_list.append(CLOSE_A_1Y_ago)
            CLOSE_B_1y_ago_list.append(CLOSE_B_1Y_ago)

            DATE_2y_ago_list.append(date_2y_ago)
            CLOSE_A_2y_ago_list.append(CLOSE_A_2Y_ago)
            CLOSE_B_2y_ago_list.append(CLOSE_B_2Y_ago)

            # path, ext = os.path.splitext(os.path.basename(_file))
            # _df.to_excel(writer, sheet_name=path)

        except FileNotFoundError:
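            # Pair CSV missing: append zero/blank placeholders to every list so
            # they keep the same length as corr_df for the assign() below.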
            OPEN_A_list.append(0)
            CLOSE_A_list.append(0)
            OPEN_B_list.append(0)
            CLOSE_B_list.append(0)
            SIGMA.append(0)
            ABS_SIGMA.append(0)
            LAST_DAY_SIGMA.append(0)
            DEV_RATE.append(0)
            AXIS_LOT_SIZE.append(0)
            PAIR_LOT_SIZE.append(0)
            LOT_SIZE_DIFF.append(0)

            TRADE_A.append("")
            TRADE_B.append("")

            total_profit_list.append(0)
            average_profit_list.append(0)
            average_plt_list.append(0)
            total_times_list.append(0)
            plus_times_list.append(0)
            minus_times_list.append(0)
            pl_times_list.append(0)
            open_days_list.append(0)

            stop_profit_times_list.append(0)
            stop_loss_times_list.append(0)
            max_day_over_times_list.append(0)

            DATE_3M_ago_list.append(0)
            CLOSE_A_3M_ago_list.append(0)
            CLOSE_B_3M_ago_list.append(0)

            DATE_6M_ago_list.append(0)
            CLOSE_A_6M_ago_list.append(0)
            CLOSE_B_6M_ago_list.append(0)

            DATE_1y_ago_list.append(0)
            CLOSE_A_1y_ago_list.append(0)
            CLOSE_B_1y_ago_list.append(0)

            DATE_2y_ago_list.append(0)
            CLOSE_A_2y_ago_list.append(0)
            CLOSE_B_2y_ago_list.append(0)

            continue

    corr_df_new = corr_df_new.assign(
        OPEN_A=OPEN_A_list,
        CLOSE_A=CLOSE_A_list,
        OPEN_B=OPEN_B_list,
        CLOSE_B=CLOSE_B_list,
        SIGMA=SIGMA,
        ABS_SIGMA=ABS_SIGMA,
        LAST_DAY_SIGMA=LAST_DAY_SIGMA,
        TRADE_A=TRADE_A,
        TRADE_B=TRADE_B,
        DEV_RATE=DEV_RATE,
        AXIS_LOT_SIZE=AXIS_LOT_SIZE,
        PAIR_LOT_SIZE=PAIR_LOT_SIZE,
        LOT_SIZE_DIFF=LOT_SIZE_DIFF,
        total_profit=total_profit_list,
        average_profit=average_profit_list,
        average_pl=average_plt_list,
        total_times=total_times_list,
        plus_times=plus_times_list,
        minus_times=minus_times_list,
        pl_times=pl_times_list,
        open_days=open_days_list,
        stop_profit_times=stop_profit_times_list,
        stop_loss_times=stop_loss_times_list,
        max_day_over_times=max_day_over_times_list,
        DATE_3M_ago=DATE_3M_ago_list,
        CLOSE_A_3M_ago=CLOSE_A_3M_ago_list,
        CLOSE_B_3M_ago=CLOSE_B_3M_ago_list,
        DATE_6M_ago=DATE_6M_ago_list,
        CLOSE_A_6M_ago=CLOSE_A_6M_ago_list,
        CLOSE_B_6M_ago=CLOSE_B_6M_ago_list,
        DATE_1y_ago=DATE_1y_ago_list,
        CLOSE_A_1y_ago=CLOSE_A_1y_ago_list,
        CLOSE_B_1y_ago=CLOSE_B_1y_ago_list,
        DATE_2y_ago=DATE_2y_ago_list,
        CLOSE_A_2y_ago=CLOSE_A_2y_ago_list,
        CLOSE_B_2y_ago=CLOSE_B_2y_ago_list)

    # print(corr_df_new)
    # corr_df_new['ABS_SIGMA'] = np.abs(corr_df_new['SIGMA'])
    corr_df_new = corr_df_new.sort_values('total_profit', ascending=False)

    corr_df_new = corr_df_new.loc[:, [
        'SYM_A',
        'SYM_A_NAME',
        'SYM_A_INDUSTRY',
        'OPEN_A',
        'CLOSE_A',
        'AXIS_LOT_SIZE',
        'TRADE_A',
        'SYM_B',
        'SYM_B_NAME',
        'SYM_B_INDUSTRY',
        'OPEN_B',
        'CLOSE_B',
        'PAIR_LOT_SIZE',
        'TRADE_B',
        'CORR_3M',
        'CORR_1Y',
        'COINT_3M',
        'COINT_1Y',
        'SIGMA',
        'ABS_SIGMA',
        'LAST_DAY_SIGMA',
        'DEV_RATE',
        'LOT_SIZE_DIFF',
        'total_profit',
        'average_profit',
        'average_pl',
        'total_times',
        'plus_times',
        'minus_times',
        'pl_times',
        'open_days',
        'stop_profit_times',
        'stop_loss_times',
        'max_day_over_times',
        'DATE_3M_ago',
        'CLOSE_A_3M_ago',
        'CLOSE_B_3M_ago',
        'DATE_6M_ago',
        'CLOSE_A_6M_ago',
        'CLOSE_B_6M_ago',
        'DATE_1y_ago',
        'CLOSE_A_1y_ago',
        'CLOSE_B_1y_ago',
        'DATE_2y_ago',
        'CLOSE_A_2y_ago',
        'CLOSE_B_2y_ago',
    ]]

    file_util.write_csv(corr_df_new,
                        os.path.join(resultDir, corr_result_file_name))
    if not isFastCaculateMode:
        file_util.write_csv(
            corr_df_new,
            os.path.join(setting.get_master_dir(), corr_result_file_name))

    # with pd.ExcelWriter(report_file) as writer:
    #corr_df_new.to_excel(writer, sheet_name='CORR')

    #writer.save()
    #writer.close()

    print('Output Report Process end!')
Example #6
def main(args):
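    """Entry point: recalculate pair data and output the consolidated report.

    In fast mode (``args[1] == 'fast'``) only the pairs already listed in the
    current report file are recalculated; otherwise every unordered pair of
    the available symbols is processed.  Pairs failing
    ``is_available_pari_data`` are skipped, the rest get a per-pair CSV in the
    result directory before ``output_report`` is run.
    """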
    start_time = datetime.now()
    print('main start ' + strftime("%Y-%m-%d %H:%M:%S"))

    isFastCaculateMode = False
    if len(args) >= 2 and args[1] in ('fast', 'FAST'):
        print('FAST CALCULATE MODE')
        isFastCaculateMode = True

    file_util.clean_target_dir(setting.get_result_dir())

    # get all target stock symbols
    symbols = file_util.getAllTargetSymbols(setting.get_input_data_dir())

    print('Total symbols size:' + str(len(symbols)))
    index1 = 0
    symbols_corr_list = []
    symbol_check_dict = {}

    if isFastCaculateMode:
        _report_pairs = file_util.read_csv(setting.get_currenty_report_file())

        for index, row in _report_pairs.iterrows():
            index1 = index1 + 1
            symb1 = str(int(row.SYM_A))
            symb2 = str(int(row.SYM_B))
            print('Processing {0}/{1} {2} - {3}...'.format(
                index1, len(_report_pairs), symb1, symb2))

            _pairs = create_pairs_dataframe(setting.get_input_data_dir(),
                                            symb1, symb2)
            corr_3m, corr_1y = trade_util.check_corr(_pairs, symb1, symb2)

            coint_3m, coint_1y = trade_util.check_cointegration(
                _pairs, symb1, symb2)

            if not is_available_pari_data(_pairs, symb1, symb2, corr_3m,
                                          corr_1y, coint_3m, coint_1y):
                continue

            symbols_corr_list.append(
                [symb1, symb2, corr_3m, corr_1y, coint_3m, coint_1y])

            _pairs = _pairs.sort_values('DATE', ascending=True)
            _pairs = calculate_spread_zscore(_pairs, symb1, symb2)
            _pairs = _pairs.sort_values('DATE', ascending=False)

            file_util.write_csv(
                _pairs,
                os.path.join(setting.get_result_dir(),
                             symb1 + '_' + symb2 + '.csv'))
    else:
        for symb1 in symbols:
            index1 = index1 + 1
            print('Processing {0}/{1} {2}...'.format(index1, len(symbols),
                                                     symb1))
            for symb2 in symbols:
                # index2 =index2+1
                #  print('Processing {0}/{1}/{2} {3}-{4}...'.format(index2,index1, len(symbols), symb1, symb2))
                if (symb1 == symb2 or (symb1 + symb2) in symbol_check_dict
                        or (symb2 + symb1) in symbol_check_dict):
                    continue
                symbol_check_dict[symb1 + symb2] = ''

                _pairs = create_pairs_dataframe(setting.get_input_data_dir(),
                                                symb1, symb2)
                corr_3m, corr_1y = trade_util.check_corr(_pairs, symb1, symb2)

                coint_3m, coint_1y = trade_util.check_cointegration(
                    _pairs, symb1, symb2)

                if not is_available_pari_data(_pairs, symb1, symb2, corr_3m,
                                              corr_1y, coint_3m, coint_1y):
                    continue

                symbols_corr_list.append(
                    [symb1, symb2, corr_3m, corr_1y, coint_3m, coint_1y])

                _pairs = _pairs.sort_values('DATE', ascending=True)
                _pairs = calculate_spread_zscore(_pairs, symb1, symb2)
                _pairs = _pairs.sort_values('DATE', ascending=False)

                file_util.write_csv(
                    _pairs,
                    os.path.join(setting.get_result_dir(),
                                 symb1 + '_' + symb2 + '.csv'))

        # print(symbols_corr_list)

    corr_data = sorted(symbols_corr_list, key=itemgetter(3),
                       reverse=True)  # sort by 1-year correlation (index 3)
    corr_data = pd.DataFrame(columns=[
        'SYM_A', 'SYM_B', 'CORR_3M', 'CORR_1Y', 'COINT_3M', 'COINT_1Y'
    ],
                             data=corr_data)
    # file_util.write_csv(corr_data, os.path.join(setting.get_result_dir(), corr_result_file_name))

    output_report(corr_data, isFastCaculateMode, setting.get_result_dir(),
                  setting.corr_result_file_name)

    process_time = datetime.now() - start_time
    print('main end! ' + strftime("%Y-%m-%d %H:%M:%S"))
    print('Time cost:{0}'.format(process_time))
Example #7
def signal_generate(pairs,
                    symbol_Axis,
                    symbol_Pair,
                    save_portfolio_file_path,
                    z_entry_threshold=2,
                    z_exit_threshold=0,
                    entry_max_days=20,
                    stop_loss_rate=-0.05,
                    stop_profit_rate=0.05):
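    """Back test one pair with z-score entries and return summary statistics.

    A position opens at the current day's open when the previous day's sigma
    crossed ``z_entry_threshold`` (and the lot-size gap is below
    ``setting.MAX_OPEN_PRICE_DIFF``).  It is closed at a later open on a
    profit/loss ratio stop or when it has been held longer than
    ``entry_max_days``; closing on ``z_exit_threshold`` is disabled via
    ``isUseExitThreshold``.  The trade list is written to
    ``<axis>_<pair>_portfolio.csv`` under ``save_portfolio_file_path``.
    """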

    isUseExitThreshold = False
    pairs = pairs.sort_values('DATE', ascending=True)
    pairs['DATE'] = pd.to_datetime(pairs['DATE'])
    pairs.index = pairs['DATE']

    # print(datetime.today() - relativedelta(years=1))
    # pairs[pairs.index > (datetime.today() - relativedelta(years=1))]

    pairs['axis_A_long'] = (pairs['saya_divide_sigma'] <=
                            -z_entry_threshold) * 1.0
    pairs['axis_A_short'] = (pairs['saya_divide_sigma'] >=
                             z_entry_threshold) * 1.0
    pairs['axis_A_exit_long'] = (pairs['saya_divide_sigma'] >=
                                 -1 * z_exit_threshold) * 1.0
    pairs['axis_A_exit_short'] = (pairs['saya_divide_sigma'] <=
                                  z_exit_threshold) * 1.0

    #pairs = pairs.sort_values('DATE', ascending=True)
    #print(pairs)

    position = {}
    portfolio_list = []
    last_row_index = ''
    haveUnsettledPostion = False

    CLOSE_STOP_LOSS = ''

    for index, row in pairs.iterrows():
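        # Entry signals are taken from the previous row (last_row_index) and
        # filled at the current row's open price; ratio stops are flagged on
        # one day's close and executed at the next day's open.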

        if np.isnan(row['saya_divide_sigma']):
            last_row_index = index
            continue

        OPEN_CAT = ''
        CLOSE_CAT = ''

        if haveUnsettledPostion:
            #print(position)
            _cat = position['OPEN_CAT']

            open_days = int(
                (index - position['OPEN_DATE']) / np.timedelta64(1, 'D'))

            if len(CLOSE_STOP_LOSS) <= 0:
                axis_now_price = row['CLOSE_' + symbol_Axis]
                pair_now_price = row['CLOSE_' + symbol_Pair]
                axis_open_price = position['AXIS_SYMB_OPEN_PRI']
                pair_open_price = position['PAIR_SYMB_OPEN_PRI']
                axis_now_lot_size = position['AXIS_SYMB_LOT']
                pair_now_lot_size = position['PAIR_SYMB_LOT']
                total_mount = axis_open_price * axis_now_lot_size + pair_open_price * pair_now_lot_size

                if 'BUY' == _cat:
                    now_profit = (axis_now_price -
                                  axis_open_price) * axis_now_lot_size + (
                                      pair_open_price -
                                      pair_now_price) * pair_now_lot_size
                else:
                    now_profit = (axis_open_price -
                                  axis_now_price) * axis_now_lot_size + (
                                      pair_now_price -
                                      pair_open_price) * pair_now_lot_size

                if now_profit < 0 and now_profit / total_mount <= stop_loss_rate:
                    CLOSE_STOP_LOSS = 'STOP_LOSS_OVER_RATIO'
                elif now_profit > 0 and now_profit / total_mount >= stop_profit_rate:
                    CLOSE_STOP_LOSS = 'STOP_PROFIT_OVER_RATIO'

                if len(CLOSE_STOP_LOSS) > 0:
                    continue

            if isUseExitThreshold and 'BUY' == _cat and pairs.at[
                    last_row_index, 'axis_A_exit_long'] == 1:
                CLOSE_CAT = 'CLOSE_BUY'
            elif isUseExitThreshold and 'SELL' == _cat and pairs.at[
                    last_row_index, 'axis_A_exit_short'] == 1:
                CLOSE_CAT = 'CLOSE_SELL'
            elif open_days > entry_max_days:  # Stop Loss by Open days
                CLOSE_CAT = 'SL_MAX_DAY_OVER'

            if len(CLOSE_CAT) > 0 or len(CLOSE_STOP_LOSS) > 0:
                axisClosePrice = row['OPEN_' + symbol_Axis]
                pairClosePrice = row['OPEN_' + symbol_Pair]

                position['CLOSE_DATE'] = index

                position['OPEN_DAYS'] = open_days
                position['CLOSE_CAT'] = CLOSE_CAT
                if len(CLOSE_STOP_LOSS) > 0:
                    position['CLOSE_CAT'] = CLOSE_STOP_LOSS

                position['AXIS_SYMB_CLOSE_PRI'] = axisClosePrice
                position['PAIR_SYMB_CLOSE_PRI'] = pairClosePrice

                axis_lot_size = position['AXIS_SYMB_LOT']
                pair_lot_size = position['PAIR_SYMB_LOT']
                position['AXIS_CLOSE_MOUNT'] = axisClosePrice * axis_lot_size
                position['PAIR_CLOSE_MOUNT'] = pairClosePrice * pair_lot_size

                axisOpenPrice = position['AXIS_SYMB_OPEN_PRI']
                pairOpenPrice = position['PAIR_SYMB_OPEN_PRI']

                haveUnsettledPostion = False
                CLOSE_STOP_LOSS = ''

                if CLOSE_CAT == 'CLOSE_BUY' or 'BUY' == _cat:
                    total = (
                        axisClosePrice - axisOpenPrice) * axis_lot_size + (
                            pairOpenPrice - pairClosePrice) * pair_lot_size
                elif CLOSE_CAT == 'CLOSE_SELL' or 'SELL' == _cat:
                    total = (
                        axisOpenPrice - axisClosePrice) * axis_lot_size + (
                            pairClosePrice - pairOpenPrice) * pair_lot_size

                position['TOTAL'] = total
                trade_commission = trade_util.get_trade_commission(
                    axisOpenPrice, pairOpenPrice, axis_lot_size, pair_lot_size)
                position['COMMISSION_N'] = trade_commission
                credit_commission = trade_util.get_credit_commission(
                    axisOpenPrice, pairOpenPrice, axis_lot_size, pair_lot_size,
                    open_days)
                position['COMMISSION_CREDIT'] = credit_commission

                profit = total - trade_commission - credit_commission
                position['PROFIT'] = profit

                pl = round(
                    profit / (position['AXIS_OPEN_MOUNT'] +
                              position['PAIR_OPEN_MOUNT']) * 100, 2)
                position['PL'] = pl

                #print(portfolio_list)
                portfolio_list.append(position)

        else:

            axisOpenPrice = row['OPEN_' + symbol_Axis]
            pairOpenPrice = row['OPEN_' + symbol_Pair]
            axis_lot_size, pair_lot_size, lot_size_diff = trade_util.get_lot_size(
                axisOpenPrice, pairOpenPrice)

            if pairs.at[
                    last_row_index,
                    'axis_A_long'] == 1 and lot_size_diff < setting.MAX_OPEN_PRICE_DIFF:
                OPEN_CAT = 'BUY'  # buy the axis symbol, sell the pair symbol

            elif pairs.at[
                    last_row_index,
                    'axis_A_short'] == 1 and lot_size_diff < setting.MAX_OPEN_PRICE_DIFF:
                OPEN_CAT = 'SELL'

            if len(OPEN_CAT) > 0:

                sigma = round(pairs.at[last_row_index, 'saya_divide_sigma'], 2)
                haveUnsettledPostion = True

                position = {
                    'AXIS_SYMBOL': symbol_Axis,
                    'PAIR_SYMBOL': symbol_Pair,
                    'OPEN_DATE': index,
                    'SIGMA': sigma,
                    'OPEN_CAT': OPEN_CAT,
                    "AXIS_SYMB_OPEN_PRI": axisOpenPrice,
                    'AXIS_SYMB_LOT': axis_lot_size,
                    'AXIS_OPEN_MOUNT': axisOpenPrice * axis_lot_size,
                    'PAIR_SYMB_OPEN_PRI': pairOpenPrice,
                    'PAIR_SYMB_LOT': pair_lot_size,
                    'PAIR_OPEN_MOUNT': pairOpenPrice * pair_lot_size,
                    'LOT_DIFF(%)': lot_size_diff,
                }

        last_row_index = index

    if len(portfolio_list) > 0:
        pd_portfolio_list = pd.DataFrame(portfolio_list)
        # print(pd_portfolio_list)
        file_util.write_csv(
            pd_portfolio_list,
            os.path.join(save_portfolio_file_path,
                         symbol_Axis + '_' + symbol_Pair + '_portfolio.csv'))

        total_profit = round(pd_portfolio_list['PROFIT'].sum())
        average_profit = round(pd_portfolio_list['PROFIT'].mean())
        average_pl = round(pd_portfolio_list['PL'].mean(), 2)
        total_times = pd_portfolio_list.shape[0]
        plus_times = pd_portfolio_list[
            pd_portfolio_list['PROFIT'] > 0].shape[0]
        minus_times = pd_portfolio_list[
            pd_portfolio_list['PROFIT'] <= 0].shape[0]
        open_days = round(pd_portfolio_list['OPEN_DAYS'].mean())

        stop_profit_times = pd_portfolio_list[
            pd_portfolio_list['CLOSE_CAT'] ==
            'STOP_PROFIT_OVER_RATIO'].shape[0]
        stop_loss_times = pd_portfolio_list[pd_portfolio_list['CLOSE_CAT'] ==
                                            'STOP_LOSS_OVER_RATIO'].shape[0]
        max_day_over_times = pd_portfolio_list[pd_portfolio_list['CLOSE_CAT']
                                               == 'SL_MAX_DAY_OVER'].shape[0]

    else:
        total_profit = 0
        average_profit = 0
        average_pl = 0
        total_times = 0
        plus_times = 0
        minus_times = 0
        open_days = 0
        stop_profit_times = 0
        stop_loss_times = 0
        max_day_over_times = 0

    return total_profit, average_profit, average_pl, total_times, plus_times, minus_times, open_days, \
           stop_profit_times, stop_loss_times, max_day_over_times
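
# Usage sketch (illustrative only, not part of the original module): back
# testing a single pair end to end, assuming create_pairs_dataframe and
# calculate_spread_zscore live alongside signal_generate, and using made-up
# symbol codes.
#
#     pairs = create_pairs_dataframe(setting.get_input_data_dir(), '1111', '2222')
#     pairs = pairs.sort_values('DATE', ascending=True)
#     pairs = calculate_spread_zscore(pairs, '1111', '2222')
#     stats = signal_generate(pairs, '1111', '2222', setting.get_result_dir())
#     (total_profit, average_profit, average_pl, total_times, plus_times,
#      minus_times, open_days, stop_profit_times, stop_loss_times,
#      max_day_over_times) = stats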