def candle_df(candles, candleamount):
    """Build candlestick-pattern rows for the last ``candleamount`` candles.

    Fans the per-row work out to ``candle_df_thread`` through a thread pool
    (``uimap``, so per-row results may arrive unordered).

    Parameters
    ----------
    candles : pandas.DataFrame
        Full candle history; only the trailing ``candleamount`` rows are used.
    candleamount : int
        Number of most-recent candles to process.

    Returns
    -------
    list
        One ``candle_df_thread`` result per processed row.
    """
    print("candle_df")
    # Compute the tail once instead of twice as before.
    tail = candles.tail(candleamount)
    indices = tail.index.values.tolist()
    data = tail.values.tolist()
    cpool = ThreadPool()
    try:
        results = cpool.uimap(candle_df_thread, indices, data)
        print(
            "Computing candlestick dataframe for given params with candles multithreaded..."
        )
        # Materializing the iterator blocks until every worker finishes.
        result = list(results)
    finally:
        # Bug fix: the original leaked the pool; close/join releases workers.
        cpool.close()
        cpool.join()
    # Bug fix: the original printed `results` (the exhausted iterator object),
    # not the computed list.
    print(result)
    return (result)
def saveEngulfingSignals(candles, candleamount, params=None, symbol='XBTUSD'):
    """Compute and save engulfing-candle signals for each parameter set.

    Publishes the candle dataframe and run settings through module globals
    (read by ``saveEngulf_thread``), then maps the parameter sets over a
    thread pool.

    Parameters
    ----------
    candles : pandas.DataFrame
        Candle history passed through to ``ind.candle_df``.
    candleamount : int
        Number of most-recent candles to process.
    params : list, optional
        Parameter sets to evaluate; defaults to an empty list.
    symbol : str, optional
        Market symbol, default 'XBTUSD'.

    Returns
    -------
    list
        One ``saveEngulf_thread`` result per parameter set.
    """
    global t_e_candles
    global t_symbol
    global t_candleamount
    # Bug fix: the original used a mutable default (params=[]), which is
    # shared across calls; a None sentinel is backward-compatible.
    params = [] if params is None else params
    t_e_candles = ind.candle_df(candles, candleamount)
    t_symbol = symbol
    t_candleamount = candleamount
    epool = ThreadPool()
    try:
        results = epool.uimap(saveEngulf_thread, params)
        print(
            "Computing engulfing signals for all params multithreaded..."
        )  #DO NOT REMOVE THIS PRINT, IT IS NEEDED TO FINISH THE MULTITHREAD
        # list() drains the iterator, forcing all threads to complete.
        result = list(results)
    finally:
        # Bug fix: the original never closed the pool, leaking worker threads.
        epool.close()
        epool.join()
    print(result)
    return (result)


#Examples
#saveKeltnerBands(100, [10,1], [True, False])
#saveATR(100, [1,20,30])
def backtest_mt(params):
    """Run one multithreaded backtest for a single parameter dict.

    Loads precomputed indicator/signal CSVs for the given params, joins them
    onto the candle data, and — when the global ``percision`` != 1 — splits
    the series into chunks backtested in parallel, using "safe points"
    returned by a first pass to align chunk boundaries for a second, fully
    accurate pass. Results are stitched back together, saved, visualized,
    and the final capital is pushed onto ``backtest_mt.q``.

    Relies on module globals: ``candleamount``, ``candleData``, ``capital``
    (mutated here), ``percision``, ``find_su``, ``xbtusd_su``, ``ethusd_su``,
    plus helpers ``saveIndicators``, ``backtest_strategy``, ``Signal``,
    ``visualize_trades``, ``saveBacktest``, ``ThreadPool``, ``mean``.

    Parameters
    ----------
    params : dict
        Strategy parameters; keys used: 'symbol', 'keltner', 'engulf',
        'ignoredoji', 'engulfthreshold', 'atrperiod', 'kperiod', 'ksma',
        'posmult', 'stoptype', 'stopmult', 'tmult', 'trade'.

    Returns
    -------
    'done' on success; None when safe points are missing; a list of
    safe-point offsets when the global ``find_su`` flag is set.
    """
    global capital
    su = None
    saveIndicators(candleamount=candleamount)  #fix later
    candleSplice = candleData.tail(candleamount)
    # Placeholders; overwritten by the CSV loads below.
    atrseries = pd.Series(dtype=np.uint16)
    keltner_signals = pd.Series(dtype=object)
    engulf_signals = pd.Series(dtype=object)
    signals = pd.DataFrame(columns=['S'])
    atrperiod = params['atrperiod']
    #candleSplice = candleSplice.reset_index(drop=True)
    # Load the per-parameter signal CSVs produced by the indicator stage.
    if (params['keltner'] == True) and (params['engulf'] == True):
        engulf_signals = pd.read_csv(
            'IndicatorData//' + params['symbol'] + '//Engulfing//' +
            "SIGNALS_t" + str(params['engulfthreshold']) + '_ignoredoji' +
            str(params['ignoredoji']) + '.csv',
            sep=',')
        keltner_signals = pd.read_csv('IndicatorData//' + params['symbol'] +
                                      '//Keltner//' + "SIGNALS_kp" +
                                      str(params['kperiod']) + '_sma' +
                                      str(params['ksma']) + '.csv',
                                      sep=',')
        signals = pd.concat([engulf_signals, keltner_signals], axis=1)
        signals.columns = ["E", "K"]
        # Where both indicators agree, emit a neutral Signal(0); otherwise
        # take the engulfing signal.
        signals['S'] = np.where((signals['E'] == signals['K']), Signal(0),
                                signals['E'])
    elif (params['keltner'] == True):
        keltner_signals = pd.read_csv('IndicatorData//' + params['symbol'] +
                                      '//Keltner//' + "SIGNALS_kp" +
                                      str(params['kperiod']) + '_sma' +
                                      str(params['ksma']) + '.csv',
                                      sep=',')
        # Flatten the single-column CSV into a 1-D signal array.
        signals['S'] = np.array(keltner_signals).reshape(
            1, len(keltner_signals))[0]
    elif (params['engulf'] == True):
        engulf_signals = pd.read_csv(
            'IndicatorData//' + params['symbol'] + '//Engulfing//' +
            "SIGNALS_t" + str(params['engulfthreshold']) + '_ignoredoji' +
            str(params['ignoredoji']) + '.csv',
            sep=',')
        signals['S'] = np.array(engulf_signals).reshape(
            1, len(engulf_signals))[0]
    print(signals['S'])
    #signals.to_csv('BacktestData//Signals//' + currentTime + '.csv')
    atrseries = pd.read_csv('IndicatorData//' + params['symbol'] + "//ATR//" +
                            "p" + str(atrperiod) + '.csv',
                            sep=',')
    # Join ATR and signals positionally, then restore the original index.
    copyIndex = candleSplice.index
    candleSplice = candleSplice.reset_index(drop=True)
    #candleSplice.merge(atrseries, left_index=True)
    #candleSplice.merge(signals['S'], right_on='S', left_index=True)
    candleSplice = pd.DataFrame.join(candleSplice, atrseries)
    candleSplice = pd.DataFrame.join(
        candleSplice, signals['S'])  #COMBINE SIGNALS AND CANDLE DATA
    candleSplice.index = copyIndex
    candleSplice['timestamp'] = pd.to_datetime(candleSplice.timestamp)
    finalCapitalData = None
    currentTime = datetime.now().strftime("%Y%m%d-%H%M")
    # Output directory name encodes every parameter of this run.
    backtestDir = params['symbol'] + '//' + "len" + str(
        candleamount) + "_k" + str(params['keltner']) + "_e" + str(
            params['engulf']
        ) + "_id" + str(params['ignoredoji']) + "_eThrs" + str(
            params['engulfthreshold']
        ) + "_ATR" + str(params['atrperiod']) + "_kP" + str(
            params['kperiod']) + "_kSMA" + str(params['ksma']) + "_pm" + str(
                params['posmult']) + "_ST" + params['stoptype'] + "_sm" + str(
                    params['stopmult']) + "_tm" + str(
                        params['tmult']) + "_TR" + params['trade']
    bt_profit = 0
    if (percision != 1):
        isafe = []
        candleSplit = []
        initialLength = len(candleSplice)
        firstStart = candleSplice.index[0]
        lastDistanceSafe = None
        # Per-symbol "safe unit" (su): candles reserved for finding a safe point.
        if params['symbol'] == 'XBTUSD':
            su = xbtusd_su
        elif params['symbol'] == 'ETHUSD':
            su = ethusd_su
        # Candidate split offsets, one per chunk boundary.
        for i in range(percision - 1):
            #abs() is a temporary fix to running the backtest on short intervals
            isafe.append((i + 1) *
                         ((abs(initialLength - percision * su)) / percision) +
                         i * su)
        #candleSplit = list(np.array_split(candleSplice, percision))
        #candleSplit = list(candleSplit)
        # NOTE(review): the first-chunk branch below is commented out, so the
        # first isafe entry only seeds the loop — confirm this is intended.
        for i in isafe:
            ia = int(i)
            if isafe.index(i) != 0:
                candleSplit.append(
                    candleSplice.iloc[int(isafe[isafe.index(i) - 1]):ia + 1])
                lastDistanceSafe = ia
                #print("lds", lastDistanceSafe)
            # else:
            #candleSplit.append(candleSplice.iloc[:ia+1])
            #print("lds", lastDistanceSafe)
        #if(len(isafe) > 1):
        candleSplit.append(candleSplice.iloc[lastDistanceSafe:])
        #print(candleSplit)
        #time.sleep(100)
        #generate parameters for multithreading
        safe_length = len(candleSplit)
        safe_candleamount = np.repeat(candleamount, safe_length).tolist()
        safe_capital = np.repeat(capital, safe_length).tolist()
        safe_params = np.repeat(params, safe_length).tolist()
        withSafe = np.repeat(True, safe_length).tolist()
        print("safe thread amount:", safe_length)
        #create multithread pool
        start = time.time()
        #print(candleSplit)
        #time.sleep(1000)
        pool = ThreadPool(safe_length)
        #run initial chunks multithreaded to find safepoints
        safe_results = pool.uimap(backtest_strategy, safe_candleamount,
                                  safe_capital, safe_params, candleSplit,
                                  withSafe)
        pool.close()
        #Compute anything we need to while threads are running
        candleSafe = []
        final_length = safe_length + 2
        withoutSafe = np.repeat(False, final_length).tolist()
        final_candleamount = np.repeat(candleamount, final_length).tolist()
        final_capital = np.repeat(capital, final_length).tolist()
        final_params = np.repeat(params, final_length).tolist()
        static_capital = capital
        # list() drains the uimap iterator, blocking until all chunks finish.
        safePoints = list(safe_results)
        ######################################
        #time.sleep(1000)
        pool.join()
        # A safe point of -1 means that chunk never reached a safe state.
        for i in safePoints:
            if i == -1:
                backtest_mt.q.put(
                    'Not all safe points found for given percision. Reduce percision, or increase timeframe'
                )
                return
        safePoints = sorted(safePoints)
        # Calibration mode: report the average offset to a safe point and exit.
        if find_su:
            su = []
            for i, point in enumerate(safePoints):
                su.append(point - candleSplit[i].index[0])
            suAvg = mean(su)  #only works on evenly spliced chunks
            chunkLength = len(candleSplit[0])
            backtest_mt.q.put(["su average:", suAvg, ' / ', chunkLength])
            return (su)
        print("safe points:", safePoints)
        # Re-split the candle data at the discovered safe points.
        idx = 0
        for i in safePoints:
            ia = i - firstStart
            idx = safePoints.index(i)
            if safePoints.index(i) != 0:
                candleSafe.append(candleSplice.iloc[lastDistanceSafe -
                                                    idx:ia + 1])
                lastDistanceSafe = ia + 1
            else:
                candleSafe.append(candleSplice.iloc[:ia + 1])
                lastDistanceSafe = ia + 1
        candleSafe.append(candleSplice.iloc[lastDistanceSafe - idx:])
        print("final thread amount:", final_length)
        #print(candleSafe)
        #time.sleep(10000)
        fpool = ThreadPool(final_length)
        # Second pass over the safe-point-aligned chunks for accurate results.
        final_results = fpool.uimap(backtest_strategy, final_candleamount,
                                    final_capital, final_params, candleSafe,
                                    withoutSafe)
        fpool.close()
        final_result = list(final_results)
        fpool.join()
        # uimap returns unordered; x[0] is each chunk's ordering key.
        ordered_result = sorted(final_result, key=lambda x: x[0])
        # Stitch chunk capital curves together, carrying profit forward.
        for i in range(len(ordered_result)):
            #print(final_result.index)
            if i != 0:
                #for non-static position size:
                ##capital += capital*((i[1]-static_capital)/static_capital)
                ordered_result[i][1]['capital'] += bt_profit
                bt_profit = ordered_result[i][1].iloc[-1][
                    'capital'] - static_capital
                finalCapitalData = pd.concat(
                    [finalCapitalData, ordered_result[i][1]],
                    ignore_index=True)
            else:
                bt_profit = ordered_result[i][1].iloc[-1][
                    'capital'] - static_capital
                finalCapitalData = pd.DataFrame(ordered_result[i][1])
        capital = finalCapitalData['capital'].iloc[-1]
    else:
        #run chunks spliced by safepoints multithreaded to retrieve fully accurate results
        # Single-threaded path: backtest the whole splice in one call.
        final_results = backtest_strategy(candleamount, capital, params,
                                          candleSplice, False)
        final_result = list(final_results)
        # NOTE(review): capital becomes a str here but stays numeric on the
        # multithreaded path — confirm downstream consumers accept both.
        capital = str(final_result[1]['capital'].iloc[-1])
        finalCapitalData = final_result[1]
    print(finalCapitalData)
    #time.sleep(1000)
    visualize_trades(finalCapitalData, backtestDir)
    saveBacktest(capital, params, backtestDir)
    backtest_mt.q.put(capital)
    end = time.time()
    # NOTE(review): `start` is only assigned on the percision != 1 path, so
    # this line raises NameError when percision == 1 — confirm intended.
    print("Thread time: ", end - start)
    return ('done')