示例#1
0
def calculatePerformanceForTable(table, tickerOrder, joinedData):
    """Compute per-column daily performance for an allocations table.

    Column i of `table` holds daily weights for tickerOrder[i]; its
    performance is the element-wise product of that weight and the ticker's
    daily factor return.

    Args:
        table: DataFrame whose i-th column holds daily weights for tickerOrder[i].
        tickerOrder: sequence of tickers aligned with table's columns.
        joinedData: joined price dataset passed to dataAck.getDailyFactorReturn.

    Returns:
        DataFrame with the same column labels as `table`, restricted to rows
        where every column has data; empty DataFrame if tickerOrder is empty.
    """
    aggregatePerformance = None
    for i, ticker in enumerate(tickerOrder):
        dailyFactorReturn = dataAck.getDailyFactorReturn(ticker, joinedData)
        column = table.columns[i]
        # Daily performance = weight * daily factor return for this ticker.
        thisPerformance = table[[column]].join(dailyFactorReturn).apply(
            lambda x: x[0] * x[1], axis=1)
        thisPerformance = pd.DataFrame(thisPerformance, columns=[column])
        if aggregatePerformance is None:
            aggregatePerformance = thisPerformance
        else:
            aggregatePerformance = aggregatePerformance.join(thisPerformance)
    if aggregatePerformance is None:
        # No tickers -> nothing to aggregate; return an empty frame instead
        # of crashing on None.dropna().
        return pd.DataFrame()
    return aggregatePerformance.dropna()
    def runModelHistorical(self, dataOfInterest):
        """Replay this model's constant position over historical factor returns.

        Returns a 5-tuple matching the model interface:
        (returnStream, factorReturn, predictions, slippageAdjustedReturn,
        rawPredictions) -- here the return stream doubles as the
        slippage-adjusted stream and predictions double as raw predictions.
        """
        factorReturn = dataAck.getDailyFactorReturn(
            self.targetTicker, dataOfInterest)
        # Constant signal: short (-1.0) when position == 0, otherwise long (1.0).
        predictions = pd.DataFrame(
            factorReturn.apply(
                lambda row: -1.0 if self.position == 0 else 1.0, axis=1),
            columns=["Predictions"]).dropna()
        joined = predictions.join(factorReturn).dropna()
        # Algo return = signal * daily factor return.
        returnStream = pd.DataFrame(
            joined.apply(lambda row: row[0] * row[1], axis=1),
            columns=["Algo Return"])

        return returnStream, factorReturn, predictions, returnStream, predictions
示例#3
0
    def runModelHistorical(self, dataOfInterest):
        """Combine the two sub-models' historical predictions into one strategy.

        Joins the prediction streams of obj1/obj2, merges them via
        self.combinePredictions, then turns the merged positions into a
        return stream with an estimated-transaction-cost adjustment.
        """
        # Only the prediction streams of the sub-models are used here.
        _, _, predictions1, _, _ = self.obj1.runModelHistorical(
            dataOfInterest)
        _, _, predictions2, _, _ = self.obj2.runModelHistorical(
            dataOfInterest)

        pairedPredictions = predictions1.join(predictions2, rsuffix="2").dropna()

        # Merge the two signals row-by-row into a single position.
        positionsTable = pd.DataFrame(
            pairedPredictions.apply(lambda row: self.combinePredictions(row),
                                    axis=1,
                                    raw=True))
        print("POSITIONS TABLE")
        print(positionsTable)

        positionsTable.columns = ["Positions"]
        positionsTable = positionsTable.dropna()
        rawPositions = positionsTable

        dailyFactorReturn = dataAck.getDailyFactorReturn(
            self.targetTicker, dataOfInterest)
        withFactor = positionsTable.join(dailyFactorReturn).dropna()
        # Algo return = position * daily factor return.
        returnStream = pd.DataFrame(
            withFactor.apply(lambda row: row[0] * row[1], axis=1),
            columns=["Algo Return"])
        factorReturn = pd.DataFrame(withFactor[["Factor Return"]])
        positions = pd.DataFrame(withFactor[["Positions"]])
        # Subtract estimated transaction costs from the raw return stream.
        estimatedSlippageLoss = portfolioGeneration.estimateTransactionCost(
            positions)
        estimatedSlippageLoss.columns = returnStream.columns
        slippageAdjustedReturn = (returnStream - estimatedSlippageLoss).dropna()

        return returnStream, factorReturn, positions, slippageAdjustedReturn, rawPositions
def generateAggregateReturnsPredictions(allModels, joinedData):
    """Build side-by-side return and prediction tables, one column per model.

    Each model's cached predictions are multiplied by its target ticker's
    daily factor return; both the return stream and the prediction stream
    are labeled by the model's hash and joined across models.
    """
    aggregateReturns = None
    aggregatePredictions = None
    for model in allModels:
        modelHash = model.getHash()
        preds = downloadAggregatePredictions(model).tz_localize(None)
        dailyFactorReturn = dataAck.getDailyFactorReturn(
            model.targetTicker, joinedData)
        withFactor = preds.join(dailyFactorReturn).dropna()
        # Return = prediction * daily factor return, one column per model.
        returnStream = pd.DataFrame(
            withFactor.apply(lambda row: row[0] * row[1], axis=1),
            columns=[modelHash])
        preds.columns = [modelHash]
        if aggregateReturns is None:
            aggregateReturns, aggregatePredictions = returnStream, preds
        else:
            aggregateReturns = aggregateReturns.join(returnStream)
            aggregatePredictions = aggregatePredictions.join(preds)
    return aggregateReturns, aggregatePredictions
def computeReturnsForUniqueModelsCache(treeModels, factorToTrade):
    """Load price data and cached returns for a set of tree models.

    Downloads every ticker the models (plus the tradable factor) require,
    joins the pulled datasets, pulls cached model returns/predictions, and
    joins the model returns with the factor's daily return.
    """
    # Preserve first-seen order while de-duplicating required tickers.
    tickersRequired = []
    for mod in treeModels:
        ticker = mod.targetTicker
        if ticker not in tickersRequired:
            tickersRequired.append(ticker)
    if factorToTrade not in tickersRequired:
        tickersRequired.append(factorToTrade)

    print(tickersRequired)

    pulledData, validTickers = dataAck.downloadTickerData(tickersRequired)
    joinedData = dataAck.joinDatasets(list(pulledData.values()))

    modelReturns, modelPredictions, modelSlippageReturns, cleanedModels = generateAllReturnsFromCache(
        treeModels)

    returnsWithFactor = modelReturns.join(
        dataAck.getDailyFactorReturn(factorToTrade, joinedData)).dropna()
    return (cleanedModels, modelReturns, modelPredictions,
            modelSlippageReturns, returnsWithFactor, joinedData)
def getDataForPortfolio(portfolioKey, factorToTrade, joinedData,
                        availableStartDate):
    """Assemble the full performance/statistics payload for one portfolio.

    Loads the portfolio's models and historical allocations, computes raw,
    commission-adjusted and capital-scaled performance, compares them with
    the portfolio's benchmark, and returns a dict of JSON-serialized tables
    plus scalar statistics for display.

    Args:
        portfolioKey: datastore key identifying the portfolio.
        factorToTrade: kept for interface compatibility; not referenced in
            this function (the benchmark comes from the portfolio record).
        joinedData: joined price dataset covering all required tickers.
        availableStartDate: first live date; "available*" statistics are
            computed from this date forward (or "NaN" when no data exists).

    Returns:
        dict mapping display-field names to JSON strings and scalars.
    """
    modelHashes = portfolio.getPortfolioModels(portfolioKey)
    models = getModelsByKey(modelHashes)
    for model in models:
        print(model.describe())
    ##GENERATE RETURNS FOR PORTFOLIO
    portfolioAllocations = portfolio.getPortfolioAllocations(portfolioKey)

    predsTable = pd.DataFrame([])
    weightsTable = pd.DataFrame([])
    tickerAllocationsTable = pd.DataFrame([])
    scaledTickerAllocationsTable = pd.DataFrame([])
    for allocation in portfolioAllocations:
        colsAlgo = []
        valsAlgo = []
        colsAlgoWeight = []
        valsAlgoWeight = []
        colsTicker = []
        valsTicker = []
        colsTickerScaled = []
        valsTickerScaled = []

        # Allocation documents flatten several tables into prefixed keys;
        # split them back out by prefix.
        for key in allocation:
            if key.startswith("ticker_"):
                colsTicker.append(key[len("ticker_"):])
                valsTicker.append(allocation[key])
            if key.startswith("scaled_ticker_"):
                colsTickerScaled.append(key[len("scaled_ticker_"):])
                # Scaled allocations are magnitudes; NaN counts as no exposure.
                valsTickerScaled.append(
                    0.0 if np.isnan(allocation[key]) else abs(allocation[key]))
            if key.startswith("algo_") and not key.startswith("algo_weight_"):
                colsAlgo.append(key[len("algo_"):])
                valsAlgo.append(allocation[key])
            if key.startswith("algo_weight_"):
                colsAlgoWeight.append(key[len("algo_weight_"):])
                valsAlgoWeight.append(allocation[key])

        predsTable = pd.concat([
            predsTable,
            pd.DataFrame([valsAlgo],
                         index=[allocation["predictionDay"]],
                         columns=colsAlgo).tz_localize(None)
        ])
        weightsTable = pd.concat([
            weightsTable,
            pd.DataFrame([valsAlgoWeight],
                         index=[allocation["predictionDay"]],
                         columns=colsAlgoWeight).tz_localize(None)
        ])
        tickerAllocationsTable = pd.concat([
            tickerAllocationsTable,
            pd.DataFrame([valsTicker],
                         index=[allocation["predictionDay"]],
                         columns=colsTicker).tz_localize(None)
        ])
        scaledTickerAllocationsTable = pd.concat([
            scaledTickerAllocationsTable,
            pd.DataFrame([valsTickerScaled],
                         index=[allocation["predictionDay"]],
                         columns=colsTickerScaled).tz_localize(None)
        ])

    predsTable = predsTable.sort_index()
    weightsTable = weightsTable.sort_index().fillna(0)
    tickerAllocationsTable = tickerAllocationsTable.sort_index().fillna(0)
    scaledTickerAllocationsTable = scaledTickerAllocationsTable.sort_index(
    ).fillna(0)

    # Per-ticker performance before commissions, then summed across tickers.
    rawTickerPerformance = portfolioGeneration.calculatePerformanceForTable(
        tickerAllocationsTable, tickerAllocationsTable.columns, joinedData)

    rawAlgoPerformance = pd.DataFrame(
        rawTickerPerformance.apply(lambda x: sum(x), axis=1),
        columns=["Algo Return Without Commissions"])

    tickerPerformance, algoPerformance, algoTransactionCost = portfolioGeneration.calculatePerformanceForAllocations(
        tickerAllocationsTable, joinedData)

    benchmark = portfolio.getPortfolioByKey(portfolioKey)["benchmark"]
    factorReturn = dataAck.getDailyFactorReturn(benchmark, joinedData)
    factorReturn.columns = ["Factor Return (" + benchmark + ")"]
    algoPerformance.columns = ["Algo Return"]
    algoVsBenchmark = factorReturn.join(algoPerformance).fillna(0)
    algoVsBenchmark = algoVsBenchmark.join(rawAlgoPerformance).dropna()

    # Alpha/beta of the portfolio against each individual ticker traded.
    tickerAlphaBetas = []
    for ticker in tickerAllocationsTable.columns.values:
        thisFactorReturn = dataAck.getDailyFactorReturn(ticker, joinedData)
        alpha, beta = empyrical.alpha_beta(algoPerformance, thisFactorReturn)
        tickerAlphaBetas.append({
            "ticker": ticker,
            "alpha": alpha * 100,
            "beta": beta
        })

    ##GET SCALED PERFORMANCE [FULL CAPITAL USED EACH DAY]
    rawTickerPerformanceScaled = portfolioGeneration.calculatePerformanceForTable(
        scaledTickerAllocationsTable, scaledTickerAllocationsTable.columns,
        joinedData)

    rawAlgoPerformanceScaled = pd.DataFrame(
        rawTickerPerformanceScaled.apply(lambda x: sum(x), axis=1),
        columns=["Algo Return Without Commissions"])

    unused, algoPerformanceScaled, algoTransactionCostScaled = portfolioGeneration.calculatePerformanceForAllocations(
        scaledTickerAllocationsTable, joinedData)

    algoPerformanceScaled.columns = ["Algo Return"]
    algoVsBenchmarkScaled = factorReturn.join(algoPerformanceScaled).fillna(0)
    algoVsBenchmarkScaled = algoVsBenchmarkScaled.join(
        rawAlgoPerformanceScaled).dropna()

    ##FORM HASH TO TICKER
    hashToTicker = {}
    for model in models:
        hashToTicker[model.getHash()] = model.targetTicker
    print(hashToTicker)

    individualAlgoPerformance = portfolioGeneration.calculatePerformanceForTable(
        predsTable,
        [hashToTicker[modelHash] for modelHash in predsTable.columns],
        joinedData)

    ##CONVERT TO USABLE OBJECTS
    tickerCols, tickerRows = portfolioGeneration.convertTableToJSON(
        empyrical.cum_returns(tickerPerformance))
    tickerAllocationsCols, tickerAllocationsRows = portfolioGeneration.convertTableToJSON(
        tickerAllocationsTable[-10:])
    algoCols, algoRows = portfolioGeneration.convertTableToJSON(
        empyrical.cum_returns(algoPerformance))
    algoVsBenchmarkCols, algoVsBenchmarkRows = portfolioGeneration.convertTableToJSON(
        empyrical.cum_returns(algoVsBenchmark))
    individualAlgoPerformanceCols, individualAlgoPerformanceRows = portfolioGeneration.convertTableToJSON(
        empyrical.cum_returns(individualAlgoPerformance))
    scaledAllocationCols, scaledAllocationRows = portfolioGeneration.convertTableToJSON(
        scaledTickerAllocationsTable)
    weightsCols, weightsRows = portfolioGeneration.convertTableToJSON(
        weightsTable)
    alpha, beta = empyrical.alpha_beta(algoPerformance, factorReturn)
    # "Recent" statistics use the trailing 100 trading rows.
    recentAlpha, recentBeta = empyrical.alpha_beta(algoPerformance[-100:],
                                                   factorReturn[-100:])
    recentSharpe = empyrical.sharpe_ratio(algoPerformance[-100:])
    recentReturn = empyrical.cum_returns(
        algoPerformance[-100:]).values[-1][0] * 100
    algoVsBenchmarkColsRecent, algoVsBenchmarkRowsRecent = portfolioGeneration.convertTableToJSON(
        empyrical.cum_returns(algoVsBenchmark[-100:]))
    commissionCols, commissionRows = portfolioGeneration.convertTableToJSON(
        algoTransactionCost)

    algoVsBenchmarkScaledCols, algoVsBenchmarkScaledRows = portfolioGeneration.convertTableToJSON(
        empyrical.cum_returns(algoVsBenchmarkScaled))
    commissionScaledCols, commissionScaledRows = portfolioGeneration.convertTableToJSON(
        algoTransactionCostScaled)
    scaledSharpe = empyrical.sharpe_ratio(algoPerformanceScaled)
    scaledReturn = empyrical.annual_return(algoPerformanceScaled)[0] * 100
    scaledVolatility = empyrical.annual_volatility(algoPerformanceScaled) * 100
    scaledAlpha, scaledBeta = empyrical.alpha_beta(algoPerformanceScaled,
                                                   factorReturn)

    algoVsBenchmarkScaledColsRecent, algoVsBenchmarkScaledRowsRecent = portfolioGeneration.convertTableToJSON(
        empyrical.cum_returns(algoVsBenchmarkScaled[-100:]))
    scaledSharpeRecent = empyrical.sharpe_ratio(algoPerformanceScaled[-100:])
    scaledReturnRecent = empyrical.annual_return(
        algoPerformanceScaled[-100:])[0] * 100
    scaledVolatilityRecent = empyrical.annual_volatility(
        algoPerformanceScaled[-100:]) * 100
    scaledAlphaRecent, scaledBetaRecent = empyrical.alpha_beta(
        algoPerformanceScaled[-100:], factorReturn[-100:])

    if len(algoPerformance[availableStartDate:]) > 0:
        ##NORMAL
        availableAlpha, availableBeta = empyrical.alpha_beta(
            algoPerformance[availableStartDate:],
            factorReturn[availableStartDate:])
        availableAlpha = availableAlpha * 100
        availableSharpe = empyrical.sharpe_ratio(
            algoPerformance[availableStartDate:])
        availableReturn = empyrical.cum_returns(
            algoPerformance[availableStartDate:]).values[-1][0] * 100
        algoVsBenchmarkColsAvailable, algoVsBenchmarkRowsAvailable = portfolioGeneration.convertTableToJSON(
            empyrical.cum_returns(algoVsBenchmark[availableStartDate:]))

        ##SCALED
        availableAlphaScaled, availableBetaScaled = empyrical.alpha_beta(
            algoPerformanceScaled[availableStartDate:],
            factorReturn[availableStartDate:])
        availableAlphaScaled = availableAlphaScaled * 100
        availableSharpeScaled = empyrical.sharpe_ratio(
            algoPerformanceScaled[availableStartDate:])
        availableReturnScaled = empyrical.cum_returns(
            algoPerformanceScaled[availableStartDate:]).values[-1][0] * 100
        algoVsBenchmarkColsAvailableScaled, algoVsBenchmarkRowsAvailableScaled = portfolioGeneration.convertTableToJSON(
            empyrical.cum_returns(algoVsBenchmarkScaled[availableStartDate:]))
    else:
        # No data since availableStartDate: surface "NaN" placeholders so
        # the display layer still has every field.
        #NORMAL
        availableAlpha, availableBeta = ("NaN", "NaN")
        availableSharpe = "NaN"
        availableReturn = "NaN"
        algoVsBenchmarkColsAvailable, algoVsBenchmarkRowsAvailable = ([], [])

        #SCALED
        availableAlphaScaled, availableBetaScaled = ("NaN", "NaN")
        availableSharpeScaled = "NaN"
        availableReturnScaled = "NaN"
        algoVsBenchmarkColsAvailableScaled, algoVsBenchmarkRowsAvailableScaled = (
            [], [])

    # NOTE: the original literal repeated "tickerCols"/"tickerRows"; the
    # duplicates were removed (later duplicates silently override earlier
    # ones in a dict literal).
    return {
        "tickerCols": json.dumps(tickerCols),
        "tickerRows": json.dumps(tickerRows),
        "tickerAllocationsCols": json.dumps(tickerAllocationsCols),
        "tickerAllocationsRows": json.dumps(tickerAllocationsRows),
        "algoCols": json.dumps(algoCols),
        "algoRows": json.dumps(algoRows),
        "algoVsBenchmarkCols": json.dumps(algoVsBenchmarkCols),
        "algoVsBenchmarkRows": json.dumps(algoVsBenchmarkRows),
        "individualAlgoPerformanceCols": json.dumps(individualAlgoPerformanceCols),
        "individualAlgoPerformanceRows": json.dumps(individualAlgoPerformanceRows),
        "scaledAllocationCols": json.dumps(scaledAllocationCols),
        "scaledAllocationRows": json.dumps(scaledAllocationRows),
        "weightsCols": json.dumps(weightsCols),
        "weightsRows": json.dumps(weightsRows),
        "algoSharpe": empyrical.sharpe_ratio(algoPerformance),
        "alpha": alpha * 100,
        "beta": beta,
        "annualReturn": empyrical.annual_return(algoPerformance)[0] * 100,
        "annualVolatility": empyrical.annual_volatility(algoPerformance) * 100,
        "recentSharpe": recentSharpe,
        "recentReturn": recentReturn,
        "recentAlpha": recentAlpha * 100,
        "recentBeta": recentBeta,
        "algoVsBenchmarkColsRecent": json.dumps(algoVsBenchmarkColsRecent),
        "algoVsBenchmarkRowsRecent": json.dumps(algoVsBenchmarkRowsRecent),
        "commissionCols": json.dumps(commissionCols),
        "commissionRows": json.dumps(commissionRows),
        "tickerAlphaBetas": tickerAlphaBetas,
        "availableAlpha": availableAlpha,
        "availableBeta": availableBeta,
        "availableSharpe": availableSharpe,
        "availableReturn": availableReturn,
        "algoVsBenchmarkColsAvailable": json.dumps(algoVsBenchmarkColsAvailable),
        "algoVsBenchmarkRowsAvailable": json.dumps(algoVsBenchmarkRowsAvailable),
        "algoVsBenchmarkScaledCols": json.dumps(algoVsBenchmarkScaledCols),
        "algoVsBenchmarkScaledRows": json.dumps(algoVsBenchmarkScaledRows),
        "commissionScaledCols": json.dumps(commissionScaledCols),
        "commissionScaledRows": json.dumps(commissionScaledRows),
        "scaledReturn": scaledReturn,
        "scaledSharpe": scaledSharpe,
        "scaledVolatility": scaledVolatility,
        "scaledAlpha": scaledAlpha * 100,
        "scaledBeta": scaledBeta,
        "algoVsBenchmarkScaledColsRecent": json.dumps(algoVsBenchmarkScaledColsRecent),
        "algoVsBenchmarkScaledRowsRecent": json.dumps(algoVsBenchmarkScaledRowsRecent),
        "scaledReturnRecent": scaledReturnRecent,
        "scaledVolatilityRecent": scaledVolatilityRecent,
        "scaledAlphaRecent": scaledAlphaRecent * 100,
        "scaledBetaRecent": scaledBetaRecent,
        "scaledSharpeRecent": scaledSharpeRecent,
        "availableAlphaScaled": availableAlphaScaled,
        "availableBetaScaled": availableBetaScaled,
        "availableSharpeScaled": availableSharpeScaled,
        "availableReturnScaled": availableReturnScaled,
        "algoVsBenchmarkColsAvailableScaled": json.dumps(algoVsBenchmarkColsAvailableScaled),
        "algoVsBenchmarkRowsAvailableScaled": json.dumps(algoVsBenchmarkRowsAvailableScaled),
    }
def getLimitedDataForPortfolio(historicalWeights, historicalPredictions, modelsUsed, factorToTrade, joinedData):
    """Score a candidate portfolio from cached weights and predictions.

    Converts historical model weights into per-ticker allocations, computes
    the performance of the *scaled* allocations (transaction costs included
    by calculatePerformanceForAllocations), and returns a dict of
    risk/return statistics plus the allocations table used.

    Args:
        historicalWeights: historical per-model weight table.
        historicalPredictions: historical per-model prediction table.
        modelsUsed: models contributing to the portfolio (each exposes
            .targetTicker).
        factorToTrade: benchmark ticker for alpha/beta and sharpe comparison.
        joinedData: joined price dataset for all required tickers.

    Returns:
        (statsDict, tickerAllocationsTable)
    """
    
    normalTickerAllocationsTable, scaledTickerAllocationsTable = historicalWeightsToTickerAllocations(historicalWeights, historicalPredictions, modelsUsed)
    
    # capitalUsed = pd.DataFrame(normalTickerAllocationsTable.apply(lambda x: sum([abs(item) for item in x]), axis=1))
    # print(capitalUsed)

    # Only the scaled (full-capital) allocations are evaluated here.
    tickerAllocationsTable = scaledTickerAllocationsTable #scaledTickerAllocationsTable
    tickerAllocationsTable = tickerAllocationsTable.fillna(0)

    tickerPerformance, algoPerformance, algoTransactionCost =  portfolioGeneration.calculatePerformanceForAllocations(tickerAllocationsTable, joinedData)

    benchmark = factorToTrade
    factorReturn = dataAck.getDailyFactorReturn(benchmark, joinedData)
    factorReturn.columns = ["Factor Return (" + benchmark + ")"]
    algoPerformance.columns = ["Algo Return"]

    # Rolling cumulative returns over ~1 week / 1 month / 1 year of trading days.
    algoPerformanceRollingWeekly = algoPerformance.rolling(5, min_periods=5).apply(lambda x:empyrical.cum_returns(x)[-1]).dropna()
    algoPerformanceRollingWeekly.columns = ["Weekly Rolling Performance"]
    
    algoPerformanceRollingMonthly = algoPerformance.rolling(22, min_periods=22).apply(lambda x:empyrical.cum_returns(x)[-1]).dropna()
    algoPerformanceRollingMonthly.columns = ["Monthly Rolling Performance"]
    
    algoPerformanceRollingYearly = algoPerformance.rolling(252, min_periods=252).apply(lambda x:empyrical.cum_returns(x)[-1]).dropna()
    algoPerformanceRollingYearly.columns = ["Yearly Rolling Performance"]
    
    tickersUsed = []
    for mod in modelsUsed:
        tickersUsed.append(mod.targetTicker)
    
#     for ticker in tickersUsed:
#         thisFactorReturn = dataAck.getDailyFactorReturn(ticker, joinedData)
#         thisFactorReturn.columns = ["Factor Return (" + ticker + ")"]
#         alpha, beta = empyrical.alpha_beta(algoPerformance, thisFactorReturn)
#         print(ticker, beta)
    
    alpha, beta = empyrical.alpha_beta(algoPerformance, factorReturn)
    sharpe_difference = empyrical.sharpe_ratio(algoPerformance) - empyrical.sharpe_ratio(factorReturn)
    annualizedReturn = empyrical.annual_return(algoPerformance)[0]
    annualizedVolatility = empyrical.annual_volatility(algoPerformance)
    stability = empyrical.stability_of_timeseries(algoPerformance)
    # Fraction of days with a strictly positive return.
    profitability = len((algoPerformance.values)[algoPerformance.values > 0])/len(algoPerformance.values)
    


    rollingSharpe = algoPerformance.rolling(252, min_periods=252).apply(lambda x:empyrical.sharpe_ratio(x)).dropna()
    rollingSharpe.columns = ["252 Day Rolling Sharpe"]

    # Dispersion and 1st-percentile floor of the rolling sharpe series.
    rollingSharpeError = rollingSharpe["252 Day Rolling Sharpe"].std()
    rollingSharpeMinimum = np.percentile(rollingSharpe["252 Day Rolling Sharpe"].values, 1)

    ##AUTOMATICALLY TAKES SLIPPAGE INTO ACCOUNT
    return {
        "benchmark":factorToTrade,
        "alpha":alpha,
        "beta":abs(beta),
        "sharpe difference":sharpe_difference,
        "annualizedReturn":annualizedReturn,
        "annualizedVolatility":annualizedVolatility,
        "sharpe":empyrical.sharpe_ratio(algoPerformance),
        "free return":annualizedReturn - annualizedVolatility,
        "stability":stability,
        "profitability":profitability,
        "rollingSharpeError":rollingSharpeError,
        "rollingSharpeMinimum":rollingSharpeMinimum,
        "weeklyMinimum":algoPerformanceRollingWeekly.min().values[0],
        "monthlyMinimum":algoPerformanceRollingMonthly.min().values[0],
        "yearlyMinimum":algoPerformanceRollingYearly.min().values[0]
    }, tickerAllocationsTable
示例#8
0
    def runModelHistorical(self, dataOfInterest):
        """Blend two sub-models' raw predictions and hold over predictionDistance days.

        Discretizes each sub-model's raw prediction stream, combines them,
        averages the combined signal over the last predictionDistance days,
        and converts the resulting position into a cost-adjusted return stream.
        """
        # Only the raw prediction streams of the sub-models are used.
        _, _, _, _, rawPredictions1 = self.obj1.runModelHistorical(
            dataOfInterest)
        _, _, _, _, rawPredictions2 = self.obj2.runModelHistorical(
            dataOfInterest)

        print(rawPredictions1)
        print(rawPredictions2)
        # Collapse each raw confidence stream into discrete positions.
        rawPredictions1 = pd.DataFrame(
            rawPredictions1.apply(lambda row: dataAck.computePosition(row),
                                  axis=1),
            columns=["Predictions 1"]).dropna()
        rawPredictions2 = pd.DataFrame(
            rawPredictions2.apply(lambda row: dataAck.computePosition(row),
                                  axis=1),
            columns=["Predictions 2"]).dropna()

        print(rawPredictions1)
        print(rawPredictions2)

        rawPredictions = rawPredictions1.join(rawPredictions2).dropna()

        print(rawPredictions)
        # Merge the two discretized signals row-by-row.
        predsTable = pd.DataFrame(
            rawPredictions.apply(lambda row: self.combinePredictions(row),
                                 axis=1,
                                 raw=True))
        rawPredictions = predsTable

        # Lag the combined signal 1..predictionDistance-1 days so each row
        # sees the signals still "in flight".
        laggedTables = []
        for lag in range(1, self.predictionDistance):
            lagged = predsTable.shift(lag)
            lagged.columns = ["Predictions_" + str(lag)]
            laggedTables.append(lagged)

        predsTable = predsTable.join(laggedTables)
        # Average current + lagged signals into one position per day.
        transformedPreds = pd.DataFrame(
            predsTable.apply(lambda row: dataAck.computePosition(row), axis=1),
            columns=["Predictions"]).dropna()
        dailyFactorReturn = dataAck.getDailyFactorReturn(
            self.targetTicker, dataOfInterest)
        transformedPreds = transformedPreds.join(dailyFactorReturn).dropna()
        # Algo return = position * daily factor return.
        returnStream = pd.DataFrame(
            transformedPreds.apply(lambda row: row[0] * row[1], axis=1),
            columns=["Algo Return"])
        factorReturn = pd.DataFrame(transformedPreds[["Factor Return"]])
        predictions = pd.DataFrame(transformedPreds[["Predictions"]])
        # Subtract estimated transaction costs from the raw return stream.
        estimatedSlippageLoss = portfolioGeneration.estimateTransactionCost(
            predictions)
        estimatedSlippageLoss.columns = returnStream.columns
        slippageAdjustedReturn = (returnStream - estimatedSlippageLoss).dropna()

        return returnStream, factorReturn, predictions, slippageAdjustedReturn, rawPredictions
示例#9
0
def getDataForPortfolio(portfolioKey):
    """Compute and JSON-serialize a portfolio's historical performance tables.

    Downloads price data for every ticker the portfolio's models target,
    rebuilds per-day allocation/weight/prediction tables from the stored
    allocation documents, and returns cumulative-return tables serialized
    as JSON.

    Args:
        portfolioKey: datastore key identifying the portfolio.

    Returns:
        4-tuple of JSON strings: (ticker performance, algo performance,
        algo vs. benchmark, individual algo performance).
    """
    models = portfolio.getModelsByKey(portfolio.getPortfolioModels(portfolioKey))
    ##DOWNLOAD REQUIRED DATA FOR TARGET TICKERS
    tickersRequired = []
    for mod in models:
        print(mod.describe())
        if mod.inputSeries.targetTicker not in tickersRequired:
            tickersRequired.append(mod.inputSeries.targetTicker)

    pulledData, validTickers = dataAck.downloadTickerData(tickersRequired)

    joinedData = dataAck.joinDatasets([pulledData[ticker] for ticker in pulledData])

    ##GENERATE RETURNS FOR PORTFOLIO
    portfolioAllocations = portfolio.getPortfolioAllocations(portfolioKey)

    predsTable = pd.DataFrame([])
    weightsTable = pd.DataFrame([])
    tickerAllocationsTable = pd.DataFrame([])
    scaledTickerAllocationsTable = pd.DataFrame([])
    for allocation in portfolioAllocations:
        colsAlgo = []
        valsAlgo = []
        colsAlgoWeight = []
        valsAlgoWeight = []
        colsTicker = []
        valsTicker = []
        colsTickerScaled = []
        valsTickerScaled = []

        # Allocation documents flatten several tables into prefixed keys;
        # split them back out by prefix.
        for key in allocation:
            if key.startswith("ticker_"):
                colsTicker.append(key[len("ticker_"):])
                valsTicker.append(allocation[key])
            if key.startswith("scaled_ticker_"):
                colsTickerScaled.append(key[len("scaled_ticker_"):])
                valsTickerScaled.append(allocation[key])
            if key.startswith("algo_") and not key.startswith("algo_weight_"):
                colsAlgo.append(key[len("algo_"):])
                valsAlgo.append(allocation[key])
            if key.startswith("algo_weight_"):
                colsAlgoWeight.append(key[len("algo_weight_"):])
                valsAlgoWeight.append(allocation[key])

        predsTable = pd.concat([predsTable, pd.DataFrame([valsAlgo], index = [allocation["predictionDay"]], columns=colsAlgo).tz_localize(None)])
        weightsTable = pd.concat([weightsTable, pd.DataFrame([valsAlgoWeight], index = [allocation["predictionDay"]], columns=colsAlgoWeight).tz_localize(None)])
        tickerAllocationsTable = pd.concat([tickerAllocationsTable, pd.DataFrame([valsTicker], index = [allocation["predictionDay"]], columns=colsTicker).tz_localize(None)])
        scaledTickerAllocationsTable = pd.concat([scaledTickerAllocationsTable, pd.DataFrame([valsTickerScaled], index = [allocation["predictionDay"]], columns=colsTickerScaled).tz_localize(None)])

    predsTable = predsTable.sort_index()
    weightsTable = weightsTable.sort_index()
    tickerAllocationsTable = tickerAllocationsTable.sort_index()
    scaledTickerAllocationsTable = scaledTickerAllocationsTable.sort_index()

    tickerPerformance = calculatePerformanceForTable(tickerAllocationsTable, tickerAllocationsTable.columns, joinedData)

    # Sum the per-ticker contributions into a single daily return series.
    algoPerformance = pd.DataFrame(tickerPerformance.apply(lambda x:sum(x), axis=1), columns=["Algo Return"])

    benchmark = portfolio.getPortfolioByKey(portfolioKey)["benchmark"]
    factorReturn = dataAck.getDailyFactorReturn(benchmark, joinedData)
    factorReturn.columns = ["Factor Return (" + benchmark + ")"]
    algoVsBenchmark = algoPerformance.join(factorReturn).dropna()

    ##FORM HASH TO TICKER
    hashToTicker = {}
    for model in models:
        hashToTicker[portfolio.getModelHash(model)] = model.inputSeries.targetTicker

    individualAlgoPerformance = calculatePerformanceForTable(predsTable,[hashToTicker[modelHash] for modelHash in predsTable.columns], joinedData)

    # Parenthesized tuple instead of the original fragile backslash
    # continuations (the trailing ",\" before a blank line was a syntax
    # hazard); the 4-tuple contents are unchanged.
    return (
        json.dumps(convertTableToJSON(empyrical.cum_returns(tickerPerformance))),
        json.dumps(convertTableToJSON(empyrical.cum_returns(algoPerformance))),
        json.dumps(convertTableToJSON(empyrical.cum_returns(algoVsBenchmark))),
        json.dumps(convertTableToJSON(empyrical.cum_returns(individualAlgoPerformance))),
    )
示例#10
0
def getFundData():
    """Assemble cacheable fund performance summaries.

    Fetches the net historical and realized allocations across all
    portfolios, downloads and joins the underlying ticker data, and for
    each allocation form computes per-ticker performance, fund
    performance, per-ticker alpha/beta versus the fund, and transaction
    costs, packaging everything as JSON-ready dicts.

    Returns:
        (historicalData, realizedData) dicts, or (None, None) when no
        historical allocations exist. When realizedData is non-empty it
        additionally carries a "todayAllocation" entry with the most
        recent absolute allocation per ticker.
    """
    historicalAllocations, realizedAllocations = getNetAllocationAcrossPortfolios(
    )
    if historicalAllocations is None:
        return None, None

    pulledData, unused_ = dataAck.downloadTickerData(
        historicalAllocations.columns.values)
    allocationJoinedData = dataAck.joinDatasets(
        [pulledData[ticker] for ticker in pulledData])

    cachedForms = []
    for allocationForm in (historicalAllocations, realizedAllocations):
        performanceByTicker, fundPerformance, fundTransactionCost = portfolioGeneration.calculatePerformanceForAllocations(
            allocationForm, allocationJoinedData)
        if len(fundPerformance) == 0:
            # Nothing traded for this form yet; cache an empty summary.
            cachedForms.append({})
            continue

        ##CALCULATE BETAS FOR ALL TICKERS TO FUND PERFORMANCE
        tickerAlphaBetas = []
        for ticker in allocationForm.columns.values:
            alpha, beta = empyrical.alpha_beta(
                fundPerformance,
                dataAck.getDailyFactorReturn(ticker, allocationJoinedData))
            tickerAlphaBetas.append(
                {"ticker": ticker, "alpha": alpha * 100, "beta": beta})

        tickerCols, tickerRows = portfolioGeneration.convertTableToJSON(
            empyrical.cum_returns(performanceByTicker))
        tickerAllocationsCols, tickerAllocationsRows = portfolioGeneration.convertTableToJSON(
            allocationForm)
        fundCols, fundRows = portfolioGeneration.convertTableToJSON(
            empyrical.cum_returns(fundPerformance))
        commissionCols, commissionRows = portfolioGeneration.convertTableToJSON(
            fundTransactionCost)

        cachedForms.append({
            "tickerAlphaBetas": tickerAlphaBetas,
            "tickerCols": json.dumps(tickerCols),
            "tickerRows": json.dumps(tickerRows),
            "tickerAllocationsCols": json.dumps(tickerAllocationsCols),
            "tickerAllocationsRows": json.dumps(tickerAllocationsRows),
            "fundCols": json.dumps(fundCols),
            "fundRows": json.dumps(fundRows),
            "sharpe": empyrical.sharpe_ratio(fundPerformance),
            "annualReturn": empyrical.annual_return(fundPerformance)[0] * 100,
            "annualVol": empyrical.annual_volatility(fundPerformance) * 100,
            "commissionCols": json.dumps(commissionCols),
            "commissionRows": json.dumps(commissionRows)
        })

    historicalData, realizedData = cachedForms
    ##GET TODAY ALLOCATION
    if realizedData != {}:
        tARows = json.loads(realizedData["tickerAllocationsRows"])
        tACols = json.loads(realizedData["tickerAllocationsCols"])
        print(tARows[-1])
        # i+1 offset because the first element of each row is the date.
        newRows = [[colName, abs(tARows[-1][i + 1])]
                   for i, colName in enumerate(tACols)]
        realizedData["todayAllocation"] = json.dumps(newRows)
        print(realizedData["todayAllocation"])

    return historicalData, realizedData
    def runModelsChunksSkipMP(self, dataOfInterest, daysToCheck = None, earlyStop=False):
        """Walk-forward historical evaluation of this predictor.

        Fans per-day model evaluation out to worker processes, blends the
        lagged per-day predictions into positions, and scores the resulting
        return stream against the target ticker's daily factor return.
        When ``earlyStop`` is enabled, evaluation proceeds in expanding
        windows and aborts as soon as interim statistics fail the quality
        thresholds.

        Args:
            dataOfInterest: joined dataset consumed by ``generateWindows``
                and ``dataAck.getDailyFactorReturn``.
            daysToCheck: if not None, restrict evaluation to the most
                recent ``daysToCheck`` prediction days.
            earlyStop: enable windowed evaluation with early rejection.

        Returns:
            ``(returnStream, factorReturn, predictions,
            slippageAdjustedReturn, rawPredictions)`` on success. On early
            rejection, ``(None, statsDict, None, None, None)`` where
            ``statsDict`` carries the offending statistics (the "sharpe"
            key is overloaded as the failure signal).
        """
        xVals, yVals, yIndex, xToday = self.generateWindows(dataOfInterest)
        mpEngine = mp.get_context('fork')
        with mpEngine.Manager() as manager:
            returnDict = manager.dict()

            identifiersToCheck = []

            for i in range(len(xVals) - 44): ##44 is lag...should not overlap with any other predictions or will ruin validity of walkforward optimization
                if i < 600:
                    ##MIN TRAINING
                    continue
                identifiersToCheck.append(str(i))

            if daysToCheck is not None:
                identifiersToCheck = identifiersToCheck[-daysToCheck:]

            ##FIRST CHECK FIRST 500 IDENTIFIERS AND THEN IF GOOD CONTINUE
            identifierWindows = [identifiersToCheck[:252], identifiersToCheck[252:600], identifiersToCheck[600:900], identifiersToCheck[900:1200], identifiersToCheck[1200:]] ##EXACTLY TWO YEARS
            if earlyStop == False:
                identifierWindows = [identifiersToCheck]
            returnStream = None
            factorReturn = None
            predictions = None
            slippageAdjustedReturn = None
            rawPredictions = None
            shortSeen = 0 if earlyStop == True else -1
            for clippedIdentifiers in identifierWindows:

                ##Spread the day-level evaluations across 4 worker processes.
                splitIdentifiers = np.array_split(np.array(clippedIdentifiers), 4)

                runningP = []
                k = 0
                for identifiers in splitIdentifiers:
                    p = mpEngine.Process(target=CurvePredictor.runDayChunking, args=(self, xVals, yVals, identifiers, returnDict,k))
                    p.start()
                    runningP.append(p)
                    k += 1

                ##FIX: join each worker directly; the original busy-polled
                ##is_alive() in a tight loop, spinning the CPU until all
                ##workers exited. Joining sequentially waits for the same set.
                for p in runningP:
                    p.join()

                preds = []
                days = []
                for i in clippedIdentifiers:
                    preds.append(returnDict[i])
                    days.append(yIndex[int(i) + 44])

                ##CREATE ACCURATE BLENDING ACROSS DAYS
                predsTable = pd.DataFrame(preds, index=days, columns=["Predictions"])

                ##Join lagged copies so each day's position blends the last
                ##predictionDistance raw predictions.
                i = 1
                tablesToJoin = []
                while i < self.predictionDistance:
                    thisTable = predsTable.shift(i)
                    thisTable.columns = ["Predictions_" + str(i)]
                    tablesToJoin.append(thisTable)
                    i += 1

                predsTable = predsTable.join(tablesToJoin)
                transformedPreds = pd.DataFrame(predsTable.apply(lambda x:dataAck.computePosition(x), axis=1), columns=["Predictions"]).dropna()
                dailyFactorReturn = dataAck.getDailyFactorReturn(self.targetTicker, dataOfInterest)
                transformedPreds = transformedPreds.join(dailyFactorReturn).dropna()
                returnStream = pd.DataFrame(transformedPreds.apply(lambda x:x[0] * x[1], axis=1), columns=["Algo Return"]) if returnStream is None else pd.concat([returnStream, pd.DataFrame(transformedPreds.apply(lambda x:x[0] * x[1], axis=1), columns=["Algo Return"])])
                factorReturn = pd.DataFrame(transformedPreds[["Factor Return"]]) if factorReturn is None else pd.concat([factorReturn, pd.DataFrame(transformedPreds[["Factor Return"]])])
                predictions = pd.DataFrame(transformedPreds[["Predictions"]]) if predictions is None else pd.concat([predictions, pd.DataFrame(transformedPreds[["Predictions"]])])
                rawPredictions = pd.DataFrame(preds, index=days, columns=["Predictions"]) if rawPredictions is None else pd.concat([rawPredictions, pd.DataFrame(preds, index=days, columns=["Predictions"])])

                ##Interim quality statistics feeding the early-stop gates.
                ##FIX: activity was computed twice identically; hoisted the
                ##repeated empyrical sharpe/annual-return calls into locals
                ##(deterministic, identical values).
                alpha, beta = empyrical.alpha_beta(returnStream, factorReturn)
                activity = np.count_nonzero(returnStream)/float(len(returnStream))
                rawBeta = abs(empyrical.alpha_beta(returnStream.apply(lambda x:dataAck.applyBinary(x), axis=0), factorReturn.apply(lambda x:dataAck.applyBinary(x), axis=0))[1])
                shortSharpe = empyrical.sharpe_ratio(returnStream)
                factorSharpe = empyrical.sharpe_ratio(factorReturn)
                algoAnnualReturn = empyrical.annual_return(returnStream.values)[0]
                algoVol = empyrical.annual_volatility(returnStream.values)
                factorAnnualReturn = empyrical.annual_return(factorReturn.values)[0]
                factorVol = empyrical.annual_volatility(factorReturn.values)
                treynor = ((algoAnnualReturn - factorAnnualReturn) \
                           / abs(empyrical.beta(returnStream, factorReturn)))
                sharpeDiff = shortSharpe - factorSharpe
                relativeSharpe = sharpeDiff / factorSharpe * (factorSharpe/abs(factorSharpe))
                stability = empyrical.stability_of_timeseries(returnStream)

                ##CALCULATE SHARPE WITH SLIPPAGE
                estimatedSlippageLoss = portfolioGeneration.estimateTransactionCost(predictions)
                estimatedSlippageLoss.columns = returnStream.columns
                slippageAdjustedReturn = (returnStream - estimatedSlippageLoss).dropna()
                slippageSharpe = empyrical.sharpe_ratio(slippageAdjustedReturn)
                sharpeDiffSlippage = slippageSharpe - factorSharpe
                relativeSharpeSlippage = sharpeDiffSlippage / factorSharpe * (factorSharpe/abs(factorSharpe))
                profitability = len((returnStream.values)[returnStream.values > 0])/len(returnStream.values)

                ##Rolling 45-day hit rate; low percentiles reject models
                ##whose profitability is unstable over time.
                rollingProfitability = returnStream.rolling(45, min_periods=45).apply(lambda x:len((x)[x > 0])/len(x)).dropna().values
                minRollingProfitability = np.percentile(rollingProfitability, 1)
                twentyFifthPercentileRollingProfitablity = np.percentile(rollingProfitability, 25)

                if np.isnan(shortSharpe) == True:
                    return None, {"sharpe":shortSharpe}, None, None, None

                elif (profitability < 0.4  or activity < 0.3 or abs(rawBeta) > 0.4 or stability < 0.3) and shortSeen == 0:
                    ##FIX: removed the duplicate "activity" key that silently
                    ##overwrote the first occurrence in this dict literal.
                    return None, {
                            "sharpe":shortSharpe, ##OVERLOADED IN FAIL
                            "activity":activity,
                            "factorSharpe":factorSharpe,
                            "sharpeSlippage":slippageSharpe,
                            "beta":abs(beta),
                            "alpha":alpha,
                            "treynor":treynor,
                            "period":"first 252 days",
                            "algoReturn":algoAnnualReturn,
                            "algoVol":algoVol,
                            "factorReturn":factorAnnualReturn,
                            "factorVol":factorVol,
                            "sharpeDiff":sharpeDiff,
                            "relativeSharpe":relativeSharpe,
                            "sharpeDiffSlippage":sharpeDiffSlippage,
                            "relativeSharpeSlippage":relativeSharpeSlippage,
                            "rawBeta":rawBeta,
                            "minRollingProfitability":minRollingProfitability,
                            "stability":stability,
                            "twentyFifthPercentileRollingProfitablity":twentyFifthPercentileRollingProfitablity,
                            "profitability":profitability
                    }, None, None, None

                elif abs(rawBeta) > 0.33 or activity < 0.3 or stability < 0.4 or twentyFifthPercentileRollingProfitablity < 0.41 \
                     or minRollingProfitability < 0.3 or profitability < 0.46:
                    periodName = "first 600 days"
                    if shortSeen == 2:
                        periodName = "first 900 days"
                    elif shortSeen == 3:
                        periodName = "first 1200 days"
                    ##FIX: removed the duplicate "activity" key here as well.
                    return None, {
                            "sharpe":shortSharpe, ##OVERLOADED IN FAIL
                            "activity":activity,
                            "factorSharpe":factorSharpe,
                            "sharpeSlippage":slippageSharpe,
                            "alpha":alpha,
                            "beta":abs(beta),
                            "treynor":treynor,
                            "period":periodName,
                            "algoReturn":algoAnnualReturn,
                            "algoVol":algoVol,
                            "factorReturn":factorAnnualReturn,
                            "factorVol":factorVol,
                            "minRollingProfitability":minRollingProfitability,
                            "sharpeDiff":sharpeDiff,
                            "relativeSharpe":relativeSharpe,
                            "sharpeDiffSlippage":sharpeDiffSlippage,
                            "relativeSharpeSlippage":relativeSharpeSlippage,
                            "rawBeta":rawBeta,
                            "stability":stability,
                            "twentyFifthPercentileRollingProfitablity":twentyFifthPercentileRollingProfitablity,
                            "profitability":profitability
                    }, None, None, None

                elif shortSeen < 4:
                    print("CONTINUING", "SHARPE:", shortSharpe, "SHARPE DIFF:", sharpeDiff, "RAW BETA:", rawBeta, "TREYNOR:", treynor)

                shortSeen += 1

            return returnStream, factorReturn, predictions, slippageAdjustedReturn, rawPredictions