def navchart():
    """Render the historical NAV chart page.

    Builds two Highcharts-ready payloads: the NAV series and the portfolio
    value breakdown (cash, positions, accumulated cash flows, derived P&L).
    """
    data = generatenav()

    # Highcharts expects x-axis keys as epoch milliseconds.
    nav_series = data[["NAV_fx"]].copy()
    nav_series.index = (nav_series.index - datetime(1970, 1, 1)).total_seconds()
    nav_series.index = nav_series.index * 1000
    nav_series.index = nav_series.index.astype(np.int64)
    navchart = nav_series.to_dict()["NAV_fx"]

    port_value_chart = data[[
        "PORT_cash_value_fx", "PORT_fx_pos", "PORT_ac_CFs_fx"
    ]].copy()
    # Accumulated P&L = current position value minus accumulated cash flows
    port_value_chart["ac_pnl_fx"] = (port_value_chart["PORT_fx_pos"] -
                                     port_value_chart["PORT_ac_CFs_fx"])
    # Same epoch-millisecond conversion for the second chart
    port_value_chart.index = (port_value_chart.index -
                              datetime(1970, 1, 1)).total_seconds()
    port_value_chart.index = port_value_chart.index * 1000
    port_value_chart.index = port_value_chart.index.astype(np.int64)
    port_value_chart = port_value_chart.to_dict()

    return render_template("warden/warden_navchart.html",
                           title="NAV Historical Chart",
                           navchart=navchart,
                           port_value_chart=port_value_chart,
                           fx=current_app.settings['PORTFOLIO']['base_fx'],
                           current_user=fx_rate(),
                           data=data,
                           current_app=current_app)
def fiatchartdatajson():
    """Return the fiat portfolio value series as JSON for Highcharts.

    On any failure, returns a JSON object of the form {"Error": "..."}.
    """
    data = generatenav()
    # Base currency lookup kept for parity with sibling endpoints
    fx = current_app.settings['PORTFOLIO']['base_fx']
    if fx is None:
        fx = 'USD'
    try:
        data['fiat'] = data['PORT_fx_pos']
        series = data[["fiat"]]
        # Highcharts expects x-axis keys as epoch milliseconds
        series.index = (series.index - datetime(1970, 1, 1)).total_seconds()
        series.index = series.index * 1000
        series.index = series.index.astype(np.int64)
        points = series.to_dict()["fiat"]
        # Highcharts also needs the points in chronological order
        import collections
        points = collections.OrderedDict(sorted(points.items()))
        return json.dumps(points)
    except Exception as e:
        return (json.dumps({"Error": str(e)}))
def histvol():
    """Return rolling annualized historical volatility as JSON.

    Query args:
        rolling: rolling window in days (defaults to 30 when absent/invalid)
        ticker:  optional ticker; when absent, portfolio NAV vol is computed
        meta:    when present, return summary stats instead of the series
    """
    # if there's a rolling variable, get it, otherwise default to 30
    if request.method == "GET":
        try:
            q = int(request.args.get("rolling"))
        # BUGFIX: int(None) raises TypeError (missing arg), not ValueError —
        # catch both so a missing "rolling" arg falls back to 30
        except (ValueError, TypeError):
            q = 30
    else:
        q = 30

    ticker = request.args.get("ticker")
    metadata = request.args.get("meta")
    # vollist is populated by whichever branch runs; previously the ticker
    # branch never defined it and the metadata block raised NameError
    vollist = None

    # When ticker is not sent, calculate for the whole portfolio NAV
    if not ticker:
        data = generatenav()
        # Annualized vol in percent: daily std * sqrt(365) * 100
        data["vol"] = (data["NAV_fx"].pct_change().rolling(q).std() *
                       (365 ** 0.5) * 100)
        vollist = data[["vol"]]
        vollist.index = vollist.index.strftime("%Y-%m-%d")
        datajson = vollist.to_json()

    if ticker:
        filename = "thewarden/historical_data/" + ticker + ".json"
        filename = os.path.join(current_path(), filename)
        try:
            with open(filename) as data_file:
                local_json = json.loads(data_file.read())
            prices = pd.DataFrame(
                local_json["Time Series (Digital Currency Daily)"]).T
            # BUGFIX: np.float was removed in NumPy 1.24 — use builtin float
            prices["4b. close (USD)"] = prices["4b. close (USD)"].astype(float)
            prices["vol"] = (
                prices["4b. close (USD)"].pct_change().rolling(q).std() *
                (365 ** 0.5) * 100)
            vollist = prices[["vol"]]
            datajson = vollist.to_json()
        except (FileNotFoundError, KeyError):
            datajson = "Ticker Not Found"

    if metadata is not None and vollist is not None:
        metatable = {}
        metatable["mean"] = vollist.vol.mean()
        metatable["max"] = vollist.vol.max()
        metatable["min"] = vollist.vol.min()
        metatable["last"] = vollist.vol[-1]
        metatable["lastvsmean"] = (
            (vollist.vol[-1] / vollist.vol.mean()) - 1) * 100
        return json.dumps(metatable)

    return datajson
def generatenav_json():
    """Return the generated NAV table as JSON.

    Query args:
        filter: optional filter string passed through to generatenav
        force:  "1"/"true"/"yes" forces a NAV rebuild (case-insensitive)
    """
    # BUGFIX: non-GET requests previously fell through to an unbound
    # variable and raised NameError on `nav.to_json()`
    if request.method != "GET":
        return json.dumps({"Error": "Method not allowed"})
    nav_filter = request.args.get("filter") or ""
    # BUGFIX: query args are strings, so force="false" used to be truthy;
    # coerce to a real boolean before passing downstream
    force_arg = request.args.get("force")
    force = str(force_arg).lower() in ("1", "true", "yes") if force_arg else False
    nav = generatenav(current_user.username, force, nav_filter)
    return nav.to_json()
def navchartdatajson():
    """Return the NAV time series as sorted JSON for Highcharts."""
    data = generatenav()
    # Generate data for NAV chart
    chart = data[["NAV_fx"]]
    # Highcharts expects x-axis keys as epoch milliseconds
    chart.index = (chart.index - datetime(1970, 1, 1)).total_seconds()
    chart.index = chart.index * 1000
    chart.index = chart.index.astype(np.int64)
    points = chart.to_dict()["NAV_fx"]
    # Highcharts also needs the points in chronological order
    import collections
    points = collections.OrderedDict(sorted(points.items()))
    return json.dumps(points)
def btcchartdatajson():
    """Return the BTC price series as JSON for Highcharts.

    On any failure, returns a JSON object of the form {"Error": "..."}.
    """
    data = generatenav()
    try:
        data['fiat'] = data['BTC_price']
        series = data[["fiat"]]
        # Highcharts expects x-axis keys as epoch milliseconds
        series.index = (series.index - datetime(1970, 1, 1)).total_seconds()
        series.index = series.index * 1000
        series.index = series.index.astype(np.int64)
        points = series.to_dict()["fiat"]
        # Highcharts also needs the points in chronological order
        import collections
        points = collections.OrderedDict(sorted(points.items()))
        return json.dumps(points)
    except Exception as e:
        return (json.dumps({"Error": str(e)}))
def stackchartdatajson():
    """Return the cumulative BTC stack (portfolio BTC value) as JSON.

    On any failure, returns a JSON object of the form {"Error": "..."}.
    """
    data = generatenav()
    try:
        data['BTC_cum'] = data['PORT_VALUE_BTC']
        series = data[["BTC_cum"]]
        # Highcharts expects x-axis keys as epoch milliseconds
        series.index = (series.index - datetime(1970, 1, 1)).total_seconds()
        series.index = series.index * 1000
        series.index = series.index.astype(np.int64)
        points = series.to_dict()["BTC_cum"]
        # Highcharts also needs the points in chronological order
        import collections
        points = collections.OrderedDict(sorted(points.items()))
        return json.dumps(points)
    except Exception as e:
        return (json.dumps({"Error": str(e)}))
def portfolio_compare_json():
    """Compare the portfolio NAV against a list of tickers.

    Query args:
        tickers: comma-separated list ("NAV" entries are skipped — the NAV
                 is always the comparison baseline)
        start, end: YYYY-MM-DD range (defaults: 2011-01-01 to now)
        method: "chart" returns a JSON payload with chart data and a stats
                table; anything else returns the merged frame as raw JSON
    """
    if request.method == "GET":
        tickers = request.args.get("tickers").upper()
        tickers = tickers.split(",")
        start_date = request.args.get("start")
        method = request.args.get("method")
        # Check if start and end dates exist, if not assign defaults
        try:
            start_date = datetime.strptime(start_date, "%Y-%m-%d")
        except (ValueError, TypeError) as e:
            logging.info(f"[portfolio_compare_json] Error: {e}, " +
                         "setting start_date to zero")
            start_date = datetime.strptime('2011-01-01', "%Y-%m-%d")
        end_date = request.args.get("end")
        try:
            end_date = datetime.strptime(end_date, "%Y-%m-%d")
        except (ValueError, TypeError) as e:
            logging.info(f"[portfolio_compare_json] Error: {e}, " +
                         "setting end_date to now")
            end_date = datetime.now()
    data = {}
    logging.info("[portfolio_compare_json] NAV requested in list of " +
                 "tickers, requesting generatenav.")
    nav = generatenav()
    nav_only = nav["NAV_fx"]
    # Now go over tickers and merge into nav_only df
    messages = {}
    meta_data = {}
    fx = current_app.settings['PORTFOLIO']['base_fx']
    if fx is None:
        fx = 'USD'
    for ticker in tickers:
        if ticker == "NAV":
            # Ticker was NAV, skipped — it is the baseline itself
            continue
        # Generate price table for the ticker and trim to match portfolio
        data = historical_prices(ticker, fx=fx)
        # BUGFIX: the None check must run BEFORE any attribute access; it
        # previously came after data.index was touched and then read
        # `data.errors` on None, raising AttributeError instead of reporting
        if data is None:
            messages[ticker] = "Error: no price data found"
            return jsonify(messages)
        data.index = data.index.astype('datetime64[ns]')
        data = data.rename(columns={'close_converted': ticker + '_price'})
        data = data[ticker + '_price']
        nav_only = pd.merge(nav_only, data, on="date", how="left")
        nav_only[ticker + "_price"].fillna(method="bfill", inplace=True)
        messages[ticker] = "ok"
        logging.info(f"[portfolio_compare_json] {ticker}: Success - Merged OK")
    nav_only.fillna(method="ffill", inplace=True)
    # Trim this list only to start_date to end_date:
    mask = (nav_only.index >= start_date) & (nav_only.index <= end_date)
    nav_only = nav_only.loc[mask]
    # Now create the list of normalized returns for the available period,
    # plus a table with individual analysis for each ticker and NAV
    nav_only["NAV_norm"] = (nav_only["NAV_fx"] / nav_only["NAV_fx"][0]) * 100
    nav_only["NAV_ret"] = nav_only["NAV_norm"].pct_change()
    table = {}
    table["meta"] = {}
    table["meta"]["start_date"] = nav_only.index[0].strftime("%m-%d-%Y")
    table["meta"]["end_date"] = nav_only.index[-1].strftime("%m-%d-%Y")
    table["meta"]["number_of_days"] = ((nav_only.index[-1] -
                                        nav_only.index[0])).days
    table["meta"]["count_of_points"] = nav_only["NAV_fx"].count().astype(float)
    table["NAV"] = {}
    table["NAV"]["start"] = nav_only["NAV_fx"][0]
    table["NAV"]["end"] = nav_only["NAV_fx"][-1]
    table["NAV"]["return"] = (nav_only["NAV_fx"][-1] /
                              nav_only["NAV_fx"][0]) - 1
    table["NAV"]["avg_return"] = nav_only["NAV_ret"].mean()
    table["NAV"]["ann_std_dev"] = nav_only["NAV_ret"].std() * math.sqrt(365)
    for ticker in tickers:
        # BUGFIX: use .get() — "NAV" never gets a messages entry, so a
        # plain messages[ticker] raised KeyError when NAV was in the list
        if messages.get(ticker) == "ok":
            # Include new columns for return and normalized data
            nav_only[ticker + "_norm"] = (nav_only[ticker + "_price"] /
                                          nav_only[ticker + "_price"][0]) * 100
            nav_only[ticker + "_ret"] = nav_only[ticker + "_norm"].pct_change()
            # Create Metadata
            table[ticker] = {}
            table[ticker]["start"] = nav_only[ticker + "_price"][0]
            table[ticker]["end"] = nav_only[ticker + "_price"][-1]
            table[ticker]["return"] = (nav_only[ticker + "_price"][-1] /
                                       nav_only[ticker + "_price"][0]) - 1
            table[ticker]["comp2nav"] = (table[ticker]["return"] -
                                         table["NAV"]["return"])
            table[ticker]["avg_return"] = nav_only[ticker + "_ret"].mean()
            table[ticker]["ann_std_dev"] = (nav_only[ticker + "_ret"].std() *
                                            math.sqrt(365))
    logging.info("[portfolio_compare_json] Success")
    # Create Correlation Matrix over all *_ret columns
    filter_col = [col for col in nav_only if col.endswith("_ret")]
    nav_matrix = nav_only[filter_col]
    corr_matrix = nav_matrix.corr(method="pearson").round(2)
    corr_html = corr_matrix.to_html(classes="table small text-center",
                                    border=0,
                                    justify="center")
    # Now, let's return the data in the correct format as requested
    if method == "chart":
        return_data = {
            "data": nav_only.to_json(),
            "messages": messages,
            "meta_data": meta_data,
            "table": table,
            "corr_html": corr_html,
        }
        return jsonify(return_data)
    return nav_only.to_json()
def portstats():
    """Compute portfolio summary statistics and return them as JSON.

    Produces, for AJAX consumption on the front page: start/end dates and
    NAVs, trailing returns (1d, 1wk, 30d, 90d, 1yr, vs ATH, since
    inception), NAV and portfolio-value extremes with their dates, and a
    7-day daily breakdown.
    """
    meta = {}
    data = generatenav()

    # Endpoint dates and NAVs
    meta["start_date"] = (data.index.min()).date().strftime("%B %d, %Y")
    meta["end_date"] = data.index.max().date().strftime("%B %d, %Y")
    meta["start_nav"] = data["NAV_fx"][0]
    meta["end_nav"] = float(data["NAV_fx"][-1])

    # NAV extremes and the dates they occurred
    meta["max_nav"] = float(data["NAV_fx"].max())
    meta["max_nav_date"] = data[
        data["NAV_fx"] == data["NAV_fx"].max()].index.strftime("%B %d, %Y")[0]
    meta["min_nav"] = float(data["NAV_fx"].min())
    meta["min_nav_date"] = data[
        data["NAV_fx"] == data["NAV_fx"].min()].index.strftime("%B %d, %Y")[0]

    # Portfolio value extremes and the dates they occurred
    meta["end_portvalue"] = data["PORT_fx_pos"][-1].astype(float)
    meta["end_portvalue_usd"] = meta["end_portvalue"] / fx_rate()['fx_rate']
    meta["max_portvalue"] = data["PORT_fx_pos"].astype(float).max()
    meta["max_port_date"] = data[
        data["PORT_fx_pos"] ==
        data["PORT_fx_pos"].max()].index.strftime("%B %d, %Y")[0]
    meta["min_portvalue"] = round(data["PORT_fx_pos"].min(), 0)
    meta["min_port_date"] = data[
        data["PORT_fx_pos"] ==
        data["PORT_fx_pos"].min()].index.strftime("%B %d, %Y")[0]

    meta["return_SI"] = (meta["end_nav"] / meta["start_nav"]) - 1

    # Trailing returns. Very new portfolios may lack enough history for a
    # given lookback, so each one falls back to "-" on IndexError.
    try:
        meta["return_1d"] = (meta["end_nav"] / data["NAV_fx"][-2]) - 1
    except IndexError:
        meta["return_1d"] = "-"
    try:
        meta["return_1wk"] = (meta["end_nav"] / data["NAV_fx"][-7]) - 1
    except IndexError:
        meta["return_1wk"] = "-"
    try:
        meta["return_30d"] = (meta["end_nav"] / data["NAV_fx"][-30]) - 1
    except IndexError:
        meta["return_30d"] = "-"
    try:
        meta["return_90d"] = (meta["end_nav"] / data["NAV_fx"][-90]) - 1
    except IndexError:
        meta["return_90d"] = "-"
    try:
        meta["return_ATH"] = (meta["end_nav"] / meta["max_nav"]) - 1
    except IndexError:
        meta["return_ATH"] = "-"
    try:
        yr_ago = pd.to_datetime(datetime.today() - relativedelta(years=1))
        yr_ago_NAV = data.NAV_fx[data.index.get_loc(yr_ago, method="nearest")]
        meta["return_1yr"] = meta["end_nav"] / yr_ago_NAV - 1
    except IndexError:
        meta["return_1yr"] = "-"

    # Daily breakdown for the last 7 days (front-page summary table)
    meta["fx"] = current_app.settings['PORTFOLIO']['base_fx']
    meta["daily"] = {}
    for days in range(1, 8):
        day = {}
        day["date"] = data.index[days * -1].date().strftime("%A <br> %m/%d")
        day["nav"] = data["NAV_fx"][days * -1]
        day["nav_prev"] = data["NAV_fx"][(days + 1) * -1]
        day["perc_chg"] = (day["nav"] / day["nav_prev"]) - 1
        day["port"] = data["PORT_fx_pos"][days * -1]
        day["port_prev"] = data["PORT_fx_pos"][(days + 1) * -1]
        day["port_chg"] = day["port"] - day["port_prev"]
        meta["daily"][days] = day

    # simplejson can't serialize numpy ints — convert them to plain int
    def convert(o):
        if isinstance(o, np.int64):
            return int(o)
        else:
            return (o)

    return simplejson.dumps(meta, ignore_nan=True, default=convert)
def heatmap_generator():
    """Build a monthly-returns heatmap for the portfolio NAV.

    Returns:
        (heatmap, heatmap_stats, years, cols) — the monthly returns table,
        a stats table derived from it (MAX/MIN/counts/means per year), the
        list of years, and the column labels. All four are None when the
        user has no transactions.
    """
    # If no transactions for this user, return empty
    from warden_modules import transactions_fx, generatenav
    transactions = transactions_fx()
    if transactions.empty:
        return None, None, None, None
    # Generate NAV table first, then daily percent change
    data = generatenav()
    data["navpchange"] = (data["NAV_fx"] / data["NAV_fx"].shift(1)) - 1
    returns = data["navpchange"]
    # Run the mrh function to generate heatmap table (eoy = end-of-year col)
    heatmap = mrh.get(returns, eoy=True)
    # BUGFIX: use a copy — the original aliased heatmap, so every stats
    # column added below leaked into the returned heatmap as well
    heatmap_stats = heatmap.copy()
    cols = [
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", "eoy",
    ]
    cols_months = [
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
    ]
    years = (heatmap.index.tolist())
    # Per-year stats over the non-zero monthly returns
    heatmap_stats["MAX"] = heatmap_stats[heatmap_stats[cols_months] != 0].max(
        axis=1)
    heatmap_stats["MIN"] = heatmap_stats[heatmap_stats[cols_months] != 0].min(
        axis=1)
    heatmap_stats["POSITIVES"] = heatmap_stats[
        heatmap_stats[cols_months] > 0].count(axis=1)
    heatmap_stats["NEGATIVES"] = heatmap_stats[
        heatmap_stats[cols_months] < 0].count(axis=1)
    heatmap_stats["POS_MEAN"] = heatmap_stats[
        heatmap_stats[cols_months] > 0].mean(axis=1)
    heatmap_stats["NEG_MEAN"] = heatmap_stats[
        heatmap_stats[cols_months] < 0].mean(axis=1)
    heatmap_stats["MEAN"] = heatmap_stats[
        heatmap_stats[cols_months] != 0].mean(axis=1)
    return (heatmap, heatmap_stats, years, cols)
def drawdown_json():
    """Compute the worst drawdowns for the NAV or a given ticker.

    Query args:
        ticker: asset to analyze (defaults to "NAV", the portfolio itself)
        n_dd:   number of drawdowns to return (defaults to 2)
        start, end: YYYY-MM-DD range (defaults: 2000-01-01 to now)
        chart:  when present, return Highcharts data with flags/plot bands

    Returns either a JSON list of drawdown records or, with chart set,
    a JSON payload of chart data, flags, plot bands and day counts.
    """
    # Get the arguments and store
    if request.method == "GET":
        start_date = request.args.get("start")
        ticker = request.args.get("ticker")
        n_dd = request.args.get("n_dd")
        chart = request.args.get("chart")
        if not ticker:
            ticker = "NAV"
        ticker = ticker.upper()
        if n_dd:
            try:
                n_dd = int(n_dd)
            # BUGFIX: int("garbage") raises ValueError, which the original
            # TypeError-only handler let escape as a 500
            except (ValueError, TypeError):
                n_dd = 2
        if not n_dd:
            n_dd = 2
        # Check if start and end dates exist, if not assign values
        try:
            start_date = datetime.strptime(start_date, "%Y-%m-%d")
        except (ValueError, TypeError) as e:
            logging.info(f"Warning: {e}, " + "setting start_date to zero")
            start_date = datetime(2000, 1, 1)
        end_date = request.args.get("end")
        try:
            end_date = datetime.strptime(end_date, "%Y-%m-%d")
        except (ValueError, TypeError) as e:
            logging.info(f"Warning: {e}, " + "setting end_date to now")
            end_date = datetime.now()

    # Create a df with either NAV or ticker prices
    if ticker == "NAV":
        data = generatenav(current_user.username)
        data = data[["NAV_fx"]]
        data = data.rename(columns={'NAV_fx': 'close'})
    else:
        # Get price of ticker passed as argument
        data = price_data_fx(ticker)
        # BUGFIX: the original read data.errors on None (AttributeError);
        # report a plain error message instead
        if data is None:
            return jsonify({"messages": f"Error: no data found for {ticker}"})
        data = data.rename(columns={'close_converted': ticker + '_price'})
        data = data[[ticker + '_price']]
        data = data.astype(float)
        data.sort_index(ascending=True, inplace=True)
        data = data.rename(columns={ticker + '_price': 'close'})

    # Trim the df only to start_date to end_date:
    mask = (data.index >= start_date) & (data.index <= end_date)
    data = data.loc[mask]

    # Calculate drawdowns: running max of cumulative return, and the
    # percent distance below it (modDD <= 0)
    df = pd.DataFrame()
    df["close"] = data['close']
    df["ret"] = df.close / df.close[0]
    df["modMax"] = df.ret.cummax()
    df["modDD"] = (df.ret / df["modMax"]) - 1
    # Keep the date alongside each row for later lookups
    df["end_date"] = df.index
    # is this the first occurrence of this modMax?
    df["dup"] = df.duplicated(["modMax"])
    # Exclude drawdowns with overlapping data; keep only the deepest per peak
    df_group = df.groupby(["modMax"]).min().sort_values(by="modDD",
                                                        ascending=True)
    # Trim to fit n_dd
    df_group = df_group.head(n_dd)

    # Format a list of dicts for return, one per drawdown
    return_list = []
    for index, row in df_group.iterrows():
        tmp_dict = {}
        tmp_dict["dd"] = row["modDD"]
        tmp_dict["start_date"] = row["end_date"].strftime("%Y-%m-%d")
        tmp_dict["end_value"] = row["close"]
        # Recovery = last date this peak (modMax) was still in force
        tmp_dict["recovery_date"] = (
            df[df.modMax == index].tail(1).end_date[0].strftime("%Y-%m-%d"))
        # Bottom = last date the price matched the drawdown's minimum close
        tmp_dict["end_date"] = (df[df.close == row["close"]].tail(
            1).end_date[0].strftime("%Y-%m-%d"))
        tmp_dict["start_value"] = df[df.index == row["end_date"]].tail(
            1).close[0]
        tmp_dict["days_to_recovery"] = (
            df[df.modMax == index].tail(1).end_date[0] - row["end_date"]).days
        tmp_dict["days_to_bottom"] = (
            df[df.close == row["close"]].tail(1).end_date[0] -
            row["end_date"]).days
        tmp_dict["days_bottom_to_recovery"] = (
            df[df.modMax == index].tail(1).end_date[0] -
            df[df.close == row["close"]].tail(1).end_date[0]).days
        return_list.append(tmp_dict)

    if chart:
        start_date = data.index.min()
        total_days = (end_date - start_date).days
        # dates need to be in Epoch time for Highcharts
        data.index = (data.index - datetime(1970, 1, 1)).total_seconds()
        data.index = data.index * 1000
        data.index = data.index.astype(np.int64)
        data = data.to_dict()
        # Flags mark each drawdown's top, bottom and recovery on the chart:
        # {x: epoch_ms, title: ..., text: ...}
        flags = []
        plot_bands = []
        total_recovery_days = 0
        total_drawdown_days = 0
        for item in return_list:
            # First the start date (top) for each drawdown
            tmp_dict = {}
            start_date = datetime.strptime(item["start_date"], "%Y-%m-%d")
            start_date = (start_date -
                          datetime(1970, 1, 1)).total_seconds() * 1000
            tmp_dict["x"] = start_date
            tmp_dict["title"] = "TOP"
            tmp_dict["text"] = "Start of drawdown"
            flags.append(tmp_dict)
            # Now the bottom for each drawdown
            tmp_dict = {}
            end_date = datetime.strptime(item["end_date"], "%Y-%m-%d")
            end_date = (end_date -
                        datetime(1970, 1, 1)).total_seconds() * 1000
            tmp_dict["x"] = end_date
            tmp_dict["title"] = "BOTTOM"
            tmp_dict["text"] = "Bottom of drawdown"
            flags.append(tmp_dict)
            # Now the recovery for each drawdown
            tmp_dict = {}
            recovery_date = datetime.strptime(item["recovery_date"],
                                              "%Y-%m-%d")
            recovery_date = (recovery_date -
                             datetime(1970, 1, 1)).total_seconds() * 1000
            tmp_dict["x"] = recovery_date
            tmp_dict["title"] = "RECOVERED"
            tmp_dict["text"] = "End of drawdown Cycle"
            flags.append(tmp_dict)
            # Now create the plot bands (drop leg, then recovery leg).
            # Dates are epoch ms here, so divide back to days.
            drop_days = (end_date - start_date) / 1000 / 60 / 60 / 24
            recovery_days = (recovery_date - end_date) / 1000 / 60 / 60 / 24
            total_drawdown_days += round(drop_days, 0)
            total_recovery_days += round(recovery_days, 0)
            tmp_dict = {}
            tmp_dict["label"] = {}
            tmp_dict["label"]["align"] = "center"
            tmp_dict["label"]["textAlign"] = "left"
            tmp_dict["label"]["rotation"] = 90
            tmp_dict["label"]["text"] = "Lasted " + \
                str(round(drop_days, 0)) + " days"
            tmp_dict["label"]["style"] = {}
            tmp_dict["label"]["style"]["color"] = "white"
            tmp_dict["label"]["style"]["fontWeight"] = "bold"
            tmp_dict["color"] = "#E6A68E"
            tmp_dict["from"] = start_date
            tmp_dict["to"] = end_date
            plot_bands.append(tmp_dict)
            tmp_dict = {}
            tmp_dict["label"] = {}
            tmp_dict["label"]["rotation"] = 90
            tmp_dict["label"]["align"] = "center"
            tmp_dict["label"]["textAlign"] = "left"
            tmp_dict["label"]["text"] = ("Lasted " +
                                         str(round(recovery_days, 0)) +
                                         " days")
            tmp_dict["label"]["style"] = {}
            tmp_dict["label"]["style"]["color"] = "white"
            tmp_dict["label"]["style"]["fontWeight"] = "bold"
            tmp_dict["color"] = "#8CADE1"
            tmp_dict["from"] = end_date
            tmp_dict["to"] = recovery_date
            plot_bands.append(tmp_dict)
        return jsonify({
            "chart_data": data['close'],
            "messages": "OK",
            "chart_flags": flags,
            "plot_bands": plot_bands,
            "days": {
                "recovery": total_recovery_days,
                "drawdown": total_drawdown_days,
                "trending": (total_days - total_drawdown_days -
                             total_recovery_days),
                "non_trending": total_drawdown_days + total_recovery_days,
                "total": total_days,
            },
        })
    return simplejson.dumps(return_list)