Code Example #1
    def initializes(self):
        """ 
        initialize the data to be used within generate
        """
        # Universe
        UH = UniHandler(self.startdate, self.enddate, True, self.altsim_dir)
        uni = UH.build_uni(self.uni_name, 'aws_dev')
        uni = UH.filter_uni(uni, self.set_exch, self.set_base, self.set_quote,
                            self.set_exclude, self.set_short_case)
        # Force BTC-USDT-BINA into the universe so its close can be used below
        # to convert BTC-quoted BINA prices to USDT.
        uni['BTC-USDT-BINA'] = True
        # Data
        DH = DataHandler(self.startdate, self.enddate, self.backdays,
                         self.interval, self.download, self.altsim_dir)
        # include data you will use
        ohlcv = DH.build_data(uni, 'ohlcv', 'aws_exchanges')
        ohlcv_close = apply_to_matrix(ohlcv, 'close', 'ticker', self.startdate,
                                      self.enddate, self.interval)
        ohlcv_close = backfill(ohlcv_close)

        # Remove tickers that don't have data
        uni = apply_filter_no_data_tickers(uni, ohlcv_close)

        # Alpha
        AH = AlphaHandler(self.startdate, self.enddate, self.interval)
        alpha = AH.build_df(uni)
        # BTC-USDT-BINA was only needed for the price conversion, so drop it
        # from the alpha frame.
        del alpha['BTC-USDT-BINA']

        # Recalculate */BTC to */USDT for BINA
        for col in alpha:
            if col.split('-')[2] == 'BINA':
                # Multiply each BINA close by the BTC-USDT-BINA close, row by
                # row, to convert BTC-quoted prices to USDT.
                x = ohlcv_close.loc[:, col]
                btc = ohlcv_close.loc[:, 'BTC-USDT-BINA']
                for i in x.index:
                    x.loc[i] = x.loc[i] * btc.loc[i]

        return uni, alpha, ohlcv, ohlcv_close
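The per-timestamp loop at the end of this example converts each BINA close from its BTC quote into USDT one row at a time. A vectorized equivalent is sketched below; the helper name convert_bina_to_usdt is illustrative (not part of the project), and it assumes ohlcv_close is a pandas DataFrame of closes indexed by timestamp with one column per ticker, as built by apply_to_matrix above.

def convert_bina_to_usdt(ohlcv_close, alpha):
    """Sketch: convert BTC-quoted BINA closes to USDT column-wise."""
    btc = ohlcv_close['BTC-USDT-BINA']
    for col in alpha.columns:
        if col.split('-')[2] == 'BINA':
            # pandas aligns the two Series on their shared timestamp index,
            # so this single assignment replaces the explicit per-row loop.
            ohlcv_close[col] = ohlcv_close[col] * btc
    return ohlcv_close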
Code Example #2
File: jaekim002.py  Project: pdxtrader/billions
    def initializes(self):
        """ 
        initialize the data to be used within generate
        """
        # Universe
        UH = UniHandler(self.startdate, self.enddate, True, self.altsim_dir)
        uni = UH.build_uni(self.uni_name, 'aws_dev')
        uni = UH.filter_uni(uni, self.set_exch, self.set_base, self.set_quote,
                            self.set_exclude, self.set_short_case)

        # Data
        DH = DataHandler(self.startdate, self.enddate, self.backdays,
                         self.interval, self.download, self.altsim_dir)
        # include data you will use
        ohlcv = DH.build_data(uni, 'ohlcv', 'aws_exchanges')
        ohlcv_close = apply_to_matrix(ohlcv, 'close', 'ticker', self.startdate,
                                      self.enddate, self.interval)
        ohlcv_close = backfill(ohlcv_close)
        #import pdb; pdb.set_trace()

        # Remove tickers that don't have data
        uni = apply_filter_no_data_tickers(uni, ohlcv_close)

        # Alpha
        AH = AlphaHandler(self.startdate, self.enddate, self.interval)
        alpha = AH.build_df(uni)

        return uni, alpha, ohlcv, ohlcv_close
Code Example #3
File: stats_analyze.py  Project: pdxtrader/billions
def run():
    parser = argparse.ArgumentParser(description='Run backtest on generate.py')
    parser.add_argument('-s',
                        '--start',
                        help='start date of universe creation, e.g. 2017-01-01',
                        type=str)
    parser.add_argument('-e',
                        '--end',
                        help='end date of universe creation, e.g. 2017-01-01',
                        type=str)
    parser.add_argument('-f',
                        '--file',
                        help='file directory of generate.py',
                        type=str,
                        default='generate.py')
    parser.add_argument('-b',
                        '--book',
                        help='booksize to be tested',
                        type=int,
                        default=1000000)
    parser.add_argument('-i',
                        '--interval',
                        help='interval',
                        type=int,
                        default=5)
    parser.add_argument('--tcost',
                        help='assign tcost',
                        type=float,
                        default=0.0)
    parser.add_argument('--altsim_dir',
                        help='dir of altsim',
                        type=str,
                        default='')
    args = parser.parse_args()
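    # Example invocation (illustrative values only, not taken from the project):
    #   python stats_analyze.py -s 2017-01-01 -e 2017-02-01 -f generate.py -b 1000000 -i 5 --tcost 0.0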
    startdate = args.start
    enddate = args.end
    file_dir = args.file
    book = args.book
    interval = args.interval
    tcost = args.tcost
    altsim_dir = args.altsim_dir

    # Output path: drop the last 8 characters of the input file path and append '_pnlanalyze.csv'.
    pnl_dir = file_dir[:-8] + '_pnlanalyze.csv'
    pos = pd.read_csv(file_dir, index_col=0)
    pos.index = pd.to_datetime(pos.index)
    #import pdb; pdb.set_trace()
    mask = (pos.index >= pd.to_datetime(startdate)) & (pos.index <=
                                                       pd.to_datetime(enddate))
    pos = pos.loc[mask]

    # Handle exchanges whose instruments are quoted in BTC/ETH etc.: if any
    # BTRX ticker is present, add a BTC-USDT-BTRX column so its close is
    # fetched for the */BTC -> */USDT conversion below.
    for i in pos:
        split_col = i.split('-')
        if split_col[2] == 'BTRX':
            pos['BTC-USDT-BTRX'] = np.nan
            break

    # DataHandler args, per the usage in the examples above: backdays=1, interval=5, download=False.
    DH = DataHandler(startdate, enddate, 1, 5, False, altsim_dir)
    ohlcv = DH.build_data(pos, 'ohlcv', 'aws_exchanges')
    ohlcv_close = apply_to_matrix(ohlcv, 'close', 'ticker', startdate,
                                  enddate, 5)
    ohlcv_close = backfill(ohlcv_close)

    #import pdb; pdb.set_trace()
    ohlcv_close = apply_delay_keep(ohlcv_close, 1)

    # Recalculate */BTC to */USDT for BTRX
    for col in ohlcv_close:
        if col.split('-')[2] == 'BTRX' and col != 'BTC-USDT-BTRX':
            x = ohlcv_close.loc[:, col]
            btc = ohlcv_close.loc[:, 'BTC-USDT-BTRX']
            for i in x.index:
                #import pdb; pdb.set_trace()
                x.loc[i] = x.loc[i] * btc.loc[i]

    stats = get_stats(pos, ohlcv_close, book, 0.0, interval, tcost=tcost)
    if os.path.isfile(pnl_dir):
        os.remove(pnl_dir)
    save_file(pnl_dir, stats, 'PnL')