def test_historical_parsed(self):
    # Sources directly
    # BTC from Alphavantage Digital
    result = historical('BTC', function='DIGITAL_CURRENCY_DAILY')
    self.assertIsInstance(result, pd.DataFrame, 'BTC DIGITAL_CURRENCY_DAILY')
    self.assertFalse(result.empty, 'BTC DIGITAL_CURRENCY_DAILY')
    # BTC converted to EUR
    result = historical_prices('BTC', fx='EUR')
    self.assertIsInstance(result, pd.DataFrame, 'BTC EUR')
    self.assertFalse(result.empty, 'BTC EUR')
    # EUR from Alphavantage FX
    result = historical('EUR', function='FX_DAILY')
    self.assertIsInstance(result, pd.DataFrame, 'EUR FX_DAILY')
    self.assertFalse(result.empty, 'EUR FX_DAILY')
    # AAPL from Alphavantage TIME_SERIES_DAILY_ADJUSTED
    result = historical('AAPL', function='TIME_SERIES_DAILY_ADJUSTED')
    self.assertIsInstance(result, pd.DataFrame, 'AAPL TIME_SERIES_DAILY_ADJUSTED')
    self.assertFalse(result.empty, 'AAPL TIME_SERIES_DAILY_ADJUSTED')
    # BTC from Cryptocompare
    result = cc_historical('BTC')
    self.assertIsInstance(result, pd.DataFrame, 'BTC CC')
    self.assertFalse(result.empty, 'BTC CC')
    # GBTC from Twelve Data
    # If the API call limit was reached this will raise an error
    result = td_historical('GBTC')
    self.assertIsInstance(result, pd.DataFrame, 'GBTC 12D')
    self.assertFalse(result.empty, 'GBTC 12D')
    # Test the auto grabber using engine.py historical_prices
    ticker_list = ['BTC', 'GBTC', 'IBM']
    for ticker in ticker_list:
        results = historical_prices(ticker)
        self.assertIsInstance(results, pd.DataFrame, f'{ticker} - Auto historical')
        self.assertFalse(results.empty, f'{ticker} - Auto historical')
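# The assertIsInstance/assertFalse pair above repeats for every source.
# A small helper along these lines (hypothetical, not part of the suite)
# would collapse each check into a single call:
def _assert_df(self, result, label):
    """Assert that a pricing call returned a non-empty DataFrame."""
    self.assertIsInstance(result, pd.DataFrame, label)
    self.assertFalse(result.empty, label)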
def portfolio_compare_json():
    if request.method == "GET":
        tickers = request.args.get("tickers").upper()
        tickers = tickers.split(",")
        start_date = request.args.get("start")
        method = request.args.get("method")
        # Check if start and end dates exist, if not assign default values
        try:
            start_date = datetime.strptime(start_date, "%Y-%m-%d")
        except (ValueError, TypeError) as e:
            logging.info(f"[portfolio_compare_json] Error: {e}, " +
                         "setting start_date to 2011-01-01")
            start_date = datetime.strptime('2011-01-01', "%Y-%m-%d")
        end_date = request.args.get("end")
        try:
            end_date = datetime.strptime(end_date, "%Y-%m-%d")
        except (ValueError, TypeError) as e:
            logging.info(f"[portfolio_compare_json] Error: {e}, " +
                         "setting end_date to now")
            end_date = datetime.now()

    logging.info("[portfolio_compare_json] NAV requested in list of " +
                 "tickers, requesting generatenav.")
    nav = generatenav()
    nav_only = nav["NAV_fx"]

    # Now go over tickers and merge into the nav_only df
    messages = {}
    meta_data = {}
    fx = current_app.settings['PORTFOLIO']['base_fx']
    if fx is None:
        fx = 'USD'
    for ticker in tickers:
        if ticker == "NAV":
            # Ticker is NAV itself, skip
            continue
        # Generate the price table for the ticker and trim to match portfolio
        data = historical_prices(ticker, fx=fx)
        # If the price download failed, return an error for this ticker
        if data is None:
            messages[ticker] = f"Could not retrieve prices for {ticker}"
            return jsonify(messages)
        data.index = data.index.astype('datetime64[ns]')
        data = data.rename(columns={'close_converted': ticker + '_price'})
        data = data[ticker + '_price']
        nav_only = pd.merge(nav_only, data, on="date", how="left")
        nav_only[ticker + "_price"].fillna(method="bfill", inplace=True)
        messages[ticker] = "ok"
        logging.info(f"[portfolio_compare_json] {ticker}: Success - Merged OK")

    nav_only.fillna(method="ffill", inplace=True)

    # Trim the df to the start_date / end_date window
    mask = (nav_only.index >= start_date) & (nav_only.index <= end_date)
    nav_only = nav_only.loc[mask]

    # Now create the list of normalized returns for the available period,
    # plus a table with individual analysis for each ticker and NAV
    nav_only["NAV_norm"] = (nav_only["NAV_fx"] / nav_only["NAV_fx"][0]) * 100
    nav_only["NAV_ret"] = nav_only["NAV_norm"].pct_change()

    table = {}
    table["meta"] = {}
    table["meta"]["start_date"] = nav_only.index[0].strftime("%m-%d-%Y")
    table["meta"]["end_date"] = nav_only.index[-1].strftime("%m-%d-%Y")
    table["meta"]["number_of_days"] = (nav_only.index[-1] -
                                       nav_only.index[0]).days
    table["meta"]["count_of_points"] = nav_only["NAV_fx"].count().astype(float)
    table["NAV"] = {}
    table["NAV"]["start"] = nav_only["NAV_fx"][0]
    table["NAV"]["end"] = nav_only["NAV_fx"][-1]
    table["NAV"]["return"] = (nav_only["NAV_fx"][-1] /
                              nav_only["NAV_fx"][0]) - 1
    table["NAV"]["avg_return"] = nav_only["NAV_ret"].mean()
    table["NAV"]["ann_std_dev"] = nav_only["NAV_ret"].std() * math.sqrt(365)

    for ticker in tickers:
        # Only include tickers that merged successfully (guards against
        # "NAV" itself, which is never added to messages)
        if messages.get(ticker) == "ok":
            # Include new columns for return and normalized data
            nav_only[ticker + "_norm"] = (nav_only[ticker + "_price"] /
                                          nav_only[ticker + "_price"][0]) * 100
            nav_only[ticker + "_ret"] = nav_only[ticker + "_norm"].pct_change()
            # Create metadata
            table[ticker] = {}
            table[ticker]["start"] = nav_only[ticker + "_price"][0]
            table[ticker]["end"] = nav_only[ticker + "_price"][-1]
            table[ticker]["return"] = (nav_only[ticker + "_price"][-1] /
                                       nav_only[ticker + "_price"][0]) - 1
            table[ticker]["comp2nav"] = table[ticker]["return"] - \
                table["NAV"]["return"]
            table[ticker]["avg_return"] = nav_only[ticker + "_ret"].mean()
            table[ticker]["ann_std_dev"] = nav_only[
                ticker + "_ret"].std() * math.sqrt(365)

    logging.info("[portfolio_compare_json] Success")

    # Create the correlation matrix
    filter_col = [col for col in nav_only if col.endswith("_ret")]
    nav_matrix = nav_only[filter_col]
    corr_matrix = nav_matrix.corr(method="pearson").round(2)
    corr_html = corr_matrix.to_html(classes="table small text-center",
                                    border=0,
                                    justify="center")

    # Now, let's return the data in the format requested
    if method == "chart":
        return_data = {
            "data": nav_only.to_json(),
            "messages": messages,
            "meta_data": meta_data,
            "table": table,
            "corr_html": corr_html,
        }
        return jsonify(return_data)

    return nav_only.to_json()
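# A minimal, self-contained sketch of the normalization used above: every
# series is rebased to 100 at the start date so different assets can be
# charted on one axis (the names below are illustrative only):
import math
import pandas as pd

def normalize_to_100(series: pd.Series) -> pd.Series:
    """Rebase a price series so the first observation equals 100."""
    return (series / series.iloc[0]) * 100

prices = pd.Series([10.0, 11.0, 9.9],
                   index=pd.date_range('2021-01-01', periods=3))
norm = normalize_to_100(prices)                     # 100.0, 110.0, 99.0
ann_std = norm.pct_change().std() * math.sqrt(365)  # same annualization as above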
def heatmapbenchmark_json():
    # Get portfolio data first
    heatmap_gen, heatmap_stats, years, cols = heatmap_generator()

    # Now get the ticker information and run the comparison
    if request.method == "GET":
        ticker = request.args.get("ticker")
        # Defaults to king BTC
        if not ticker:
            ticker = "BTC"

    # Gather the first trade date in the portfolio and store it --
    # used to match the matrices later
    # Pandas dataframe with transactions
    df = transactions_fx()
    df["trade_date"] = pd.to_datetime(df["trade_date"])
    start_date = df["trade_date"].min()
    start_date -= timedelta(days=1)  # start on t-1 of first trade

    # Generate the price table for the ticker and trim to match the portfolio
    fx = current_app.settings['PORTFOLIO']['base_fx']
    data = historical_prices(ticker, fx)
    # If the price download failed, return an error
    if data is None:
        messages = {ticker: f"Could not retrieve prices for {ticker}"}
        return jsonify(messages)
    mask = data.index >= start_date
    data = data.loc[mask]

    data = data.rename(columns={'close_converted': ticker + '_price'})
    data = data[[ticker + '_price']]
    data.sort_index(ascending=True, inplace=True)
    data["pchange"] = (data / data.shift(1)) - 1

    # Run the mrh function to generate the heatmap table
    heatmap = mrh.get(data["pchange"], eoy=True)
    # Copy so the summary columns below don't mutate the heatmap itself
    heatmap_stats = heatmap.copy()
    cols = [
        "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep",
        "Oct", "Nov", "Dec", "eoy",
    ]
    cols_months = [
        "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep",
        "Oct", "Nov", "Dec",
    ]
    years = heatmap.index.tolist()

    # Create summary stats for the ticker
    heatmap_stats["MAX"] = heatmap_stats[
        heatmap_stats[cols_months] != 0].max(axis=1)
    heatmap_stats["MIN"] = heatmap_stats[
        heatmap_stats[cols_months] != 0].min(axis=1)
    heatmap_stats["POSITIVES"] = heatmap_stats[
        heatmap_stats[cols_months] > 0].count(axis=1)
    heatmap_stats["NEGATIVES"] = heatmap_stats[
        heatmap_stats[cols_months] < 0].count(axis=1)
    heatmap_stats["POS_MEAN"] = heatmap_stats[
        heatmap_stats[cols_months] > 0].mean(axis=1)
    heatmap_stats["NEG_MEAN"] = heatmap_stats[
        heatmap_stats[cols_months] < 0].mean(axis=1)
    heatmap_stats["MEAN"] = heatmap_stats[
        heatmap_stats[cols_months] != 0].mean(axis=1)

    # Create the difference between the two dfs - Pandas is cool!
    heatmap_difference = heatmap_gen - heatmap

    return simplejson.dumps(
        {
            "heatmap": heatmap.to_dict(),
            "heatmap_stats": heatmap_stats.to_dict(),
            "cols": cols,
            "years": years,
            "ticker": ticker,
            "heatmap_diff": heatmap_difference.to_dict(),
        },
        ignore_nan=True,
        default=datetime.isoformat,
    )
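# For reference, a pandas-only sketch approximating the year x month return
# grid that mrh.get() builds (illustrative only -- not the library's
# implementation; the eoy column and Jan..Dec column ordering are omitted):
import pandas as pd

def monthly_return_grid(daily_pchange: pd.Series) -> pd.DataFrame:
    """Compound daily percentage changes into a Year x Month return table."""
    monthly = (1 + daily_pchange.fillna(0)).resample('M').prod() - 1
    frame = monthly.to_frame('ret')
    frame['Year'] = frame.index.year
    frame['Month'] = frame.index.strftime('%b')
    return frame.pivot(index='Year', columns='Month', values='ret')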
def price_and_position():
    # Gets price and position data for a specific ticker
    ticker = request.args.get("ticker")
    fx = request.args.get("fx")
    if fx is None:
        fx = fx_rate()['base']

    # Gets price and market data first
    realtime_data = realtime_price(ticker=ticker, fx=fx)
    historical_data = historical_prices(ticker=ticker, fx=fx)
    historical_data.index = historical_data.index.astype('datetime64[ns]')

    filemeta = (ticker + "_" + fx + ".meta")
    historical_meta = pickle_it(action='load', filename=filemeta)

    price_chart = historical_data[["close_converted", "close"]].copy()
    # Dates need to be in Epoch time for Highcharts
    # (a reusable helper is sketched after this function)
    price_chart.index = price_chart.index.astype('datetime64[ns]')
    price_chart.index = (price_chart.index -
                         datetime(1970, 1, 1)).total_seconds()
    price_chart.index = price_chart.index * 1000
    price_chart.index = price_chart.index.astype(np.int64)
    price_chart = price_chart.to_dict()
    price_chart_usd = price_chart["close"]
    price_chart = price_chart["close_converted"]

    # Now gets position data
    df = positions()
    if isinstance(df, pd.DataFrame):
        if not df.empty:
            df = df[df['trade_asset_ticker'] == ticker]

    df_trades = transactions_fx()
    position_chart = None
    if isinstance(df_trades, pd.DataFrame):
        df_trades = df_trades[df_trades['trade_asset_ticker'] == ticker]
        if not df_trades.empty:
            df_trades = df_trades.sort_index(ascending=True)
            df_trades['trade_quantity_cum'] = df_trades[
                'trade_quantity'].cumsum()
            position_chart = df_trades[["trade_quantity_cum"]].copy()
            # Dates need to be in Epoch time for Highcharts
            position_chart.index = position_chart.index.astype(
                'datetime64[ns]')
            position_chart.index = (position_chart.index -
                                    datetime(1970, 1, 1)).total_seconds()
            position_chart.index = position_chart.index * 1000
            position_chart.index = position_chart.index.astype(np.int64)
            position_chart = position_chart.to_dict()
            position_chart = position_chart["trade_quantity_cum"]

    if ticker == 'GBTC':
        from pricing_engine.engine import GBTC_premium
        from parseNumbers import parseNumber
        GBTC_price = parseNumber(realtime_data['price'])
        GBTC_fairvalue, GBTC_premium = GBTC_premium(GBTC_price)
    else:
        GBTC_premium = GBTC_fairvalue = None

    return render_template("warden/price_and_position.html",
                           title="Ticker Price and Positions",
                           current_app=current_app,
                           current_user=fx_rate(),
                           realtime_data=realtime_data,
                           historical_data=historical_data,
                           historical_meta=historical_meta,
                           positions=df,
                           ticker=ticker,
                           fx=fx,
                           price_chart=price_chart,
                           price_chart_usd=price_chart_usd,
                           position_chart=position_chart,
                           GBTC_premium=GBTC_premium,
                           GBTC_fairvalue=GBTC_fairvalue)
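# The epoch-millisecond conversion above appears twice. A small helper like
# this sketch (hypothetical, not part of the codebase) captures the intent:
import numpy as np
import pandas as pd
from datetime import datetime

def to_epoch_ms(df: pd.DataFrame) -> pd.DataFrame:
    """Convert a DatetimeIndex to integer epoch milliseconds for Highcharts."""
    out = df.copy()
    out.index = out.index.astype('datetime64[ns]')
    seconds = (out.index - datetime(1970, 1, 1)).total_seconds()
    out.index = (seconds * 1000).astype(np.int64)
    return out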
def simulate_portfolio(
        assets=['BTC'],           # List of asset tickers
        weights=[1],              # List of weights, 1 = 100%
        rebalance='never',        # never, daily, weekly, monthly, quarterly, annually
        save=False,               # saves the variables above under a name
        name=None,                # string of name to save
        initial_investment=1000,  # in fx values
        load=False,
        start_date=datetime(2000, 1, 1),
        end_date=datetime.today(),
        fx='USD',
        short_term_tax_rate=0):

    # Create an empty df
    merged_df = pd.DataFrame(columns=['date'])
    # Fill the dates from first trade until today
    merged_df['date'] = pd.date_range(start=start_date, end=end_date)
    merged_df = merged_df.set_index('date')
    merged_df.index = merged_df.index.astype('datetime64[ns]')
    # Create empty columns for later
    merged_df['fiat_value'] = 0
    merged_df['rebalance_date'] = False

    for ticker in assets:
        prices = historical_prices(ticker, fx=fx)
        prices.index = prices.index.astype('datetime64[ns]')
        if prices.empty:
            merged_df[ticker + '_price'] = 0
            flash(
                f"Prices for ticker {ticker} could not be downloaded. " +
                f"{ticker} was not included in analysis.", "warning")
            save = False
            continue

        start_date_ticker = prices.index.min()
        if start_date_ticker > start_date:
            try:
                flash(
                    f"Requested start date was {start_date.strftime('%b-%d-%y')} " +
                    f"but the ticker {ticker} only has pricing data from " +
                    f"{start_date_ticker.strftime('%b-%d-%y')}. Adjusted start date.",
                    "warning")
            except Exception:
                pass
            start_date = start_date_ticker

        prices = prices.rename(columns={'close_converted': ticker + '_price'})
        prices[ticker + '_price'] = prices[ticker + '_price'].astype(float)
        prices = prices[ticker + '_price']
        # Check if prices is a Series. If so, convert to dataframe
        if isinstance(prices, pd.Series):
            prices = prices.to_frame()

        merged_df = pd.merge(merged_df, prices, on='date', how='left')
        # Replace NaN with previous value; if no previous value, then zero
        merged_df[ticker + '_price'].fillna(method='backfill', inplace=True)
        merged_df[ticker + '_price'].fillna(method='ffill', inplace=True)
        merged_df[ticker + '_return'] = merged_df[
            ticker + '_price'].pct_change().fillna(0)

    # Trim the dataframe so it starts at the new start date.
    # The start date is adjusted to the first date when all price data is
    # available -- see code above.
    mask = (merged_df.index >= start_date)
    merged_df = merged_df.loc[mask]

    # With the dataframe trimmed, calculate cumulative returns
    for ticker in assets:
        merged_df[ticker + '_cum_return'] = (
            1 + merged_df[ticker + '_return']).cumprod()
        # Calculate the unrebalanced positions
        merged_df[ticker + '_fiat_pos_unbalanced'] = (
            weights[assets.index(ticker)] * initial_investment *
            merged_df[ticker + '_cum_return'])
        merged_df[ticker + '_fiat_pos_balanced'] = np.nan

    # Portfolio value, unrebalanced
    merged_df['port_fiat_pos_unbalanced'] = (merged_df[[
        col for col in merged_df.columns
        if col.endswith('_fiat_pos_unbalanced')
    ]].sum(axis=1))

    for ticker in assets:
        merged_df[ticker + '_weight'] = (
            merged_df[ticker + '_fiat_pos_unbalanced'] /
            merged_df['port_fiat_pos_unbalanced'])

    # Create a list of rebalancing dates
    # (a standalone schedule generator is sketched after this function)
    rebalance_days = [('never', None),
                      ('daily', timedelta(days=1)),
                      ('weekly', timedelta(days=7)),
                      ('monthly', relativedelta(months=+1)),
                      ('quarterly', relativedelta(months=+3)),
                      ('annually', relativedelta(months=+12))]
    rebalancing_delta = dict(rebalance_days)[rebalance]

    # Flag the rebalancing dates in the df
    loop_date = start_date
    if rebalancing_delta is not None:
        while loop_date < end_date:
            merged_df.at[loop_date, 'rebalance_date'] = True
            loop_date += rebalancing_delta

    previous_date = start_date
    # Rebalance the portfolio on rebalancing dates
    for loop_date in merged_df.index.tolist():
        if loop_date == start_date:
            for ticker in assets:
                merged_df.at[loop_date, ticker + '_costbasis'] = (
                    weights[assets.index(ticker)] * initial_investment)
            continue
        # NOT A REBALANCE DATE:
        if not merged_df.at[loop_date, 'rebalance_date']:
            # Repeat the cost basis from before, nothing changed
            for ticker in assets:
                merged_df.at[loop_date, ticker + '_costbasis'] = (
                    merged_df.at[previous_date, ticker + '_costbasis'])
                merged_df.at[loop_date, ticker + '_fiat_pos_balanced'] = (
                    merged_df.at[previous_date, ticker + '_fiat_pos_balanced'] *
                    (1 + merged_df.at[loop_date, ticker + '_return']))
        # REBALANCE DATE, make changes
        else:
            # Rebalancing back to target weights is still a stub here;
            # for now just log the date so the schedule can be inspected
            print(loop_date)
        previous_date = loop_date

    print(merged_df)
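# A standalone sketch of the rebalancing-date schedule built above, using
# the same frequency table (the function name is illustrative only):
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta

def rebalance_dates(start, end, frequency='monthly'):
    """Yield rebalancing dates from start (inclusive) up to end."""
    deltas = {'never': None,
              'daily': timedelta(days=1),
              'weekly': timedelta(days=7),
              'monthly': relativedelta(months=+1),
              'quarterly': relativedelta(months=+3),
              'annually': relativedelta(months=+12)}
    delta = deltas[frequency]
    if delta is None:
        return
    current = start
    while current < end:
        yield current
        current += delta

# Example: monthly schedule for Q1 yields Jan-01, Feb-01 and Mar-01
# list(rebalance_dates(datetime(2021, 1, 1), datetime(2021, 4, 1), 'monthly'))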
def generatenav(user=None, force=False, filter=None):
    if not user:
        user = current_user.username
    PORTFOLIO_MIN_SIZE_NAV = 1
    RENEW_NAV = 10
    FX = current_app.settings['PORTFOLIO']['base_fx']
    # Portfolios smaller than this size do not account for NAV calculations.
    # Otherwise, there's an impact of dust left in the portfolio (in USD).
    # This is set in the config.ini file.
    min_size_for_calc = int(PORTFOLIO_MIN_SIZE_NAV)
    save_nav = True
    # This process can take some time and it's intensive to run NAV
    # generation every time the NAV is needed. A compromise is to save
    # the last NAV generation locally and only refresh after a period of
    # time. This period is set up in config.ini as RENEW_NAV (in minutes).
    # If the local file is newer than RENEW_NAV minutes, it is used --
    # unless force is True, in which case a rebuild is done regardless.
    # Local files are saved under a hash of the username.
    filename = "warden/" + user + FX + ".nav"
    filename = os.path.join(home_path(), filename)
    if force:
        # Since this function can be run as a thread, it's safer to delete
        # the current NAV file if it exists. This avoids other tasks reading
        # a local file that is outdated.
        try:
            os.remove(filename)
        except Exception:
            pass

    if not force:
        try:
            # Check if the saved NAV file is recent enough to be used.
            # The local file has to be less than RENEW_NAV minutes old.
            modified = datetime.utcfromtimestamp(os.path.getmtime(filename))
            elapsed_seconds = (datetime.utcnow() - modified).total_seconds()
            if (elapsed_seconds / 60) < int(RENEW_NAV):
                nav_pickle = pd.read_pickle(filename)
                return (nav_pickle)
        except Exception:
            pass

    # Pandas dataframe with transactions
    df = transactions_fx()
    # Make sure it is a dataframe
    if isinstance(df, pd.Series):
        df = df.to_frame()
    # If a filter argument was passed, execute it
    if filter:
        df = df.query(filter)
    start_date = df.index.min() - timedelta(
        days=1)  # start on t-1 of first trade
    end_date = datetime.today()

    # Create a list of all tickers that were traded in this portfolio
    tickers = list_tickers()
    if 'BTC' not in tickers:
        tickers.append('BTC')

    fx = current_app.settings['PORTFOLIO']['base_fx']
    if fx is None:
        fx = 'USD'

    # Create an empty df, fill with dates, then with operations and prices,
    # then NAV
    dailynav = pd.DataFrame(columns=['date'])
    # Fill the dates from first trade until today
    dailynav['date'] = pd.date_range(start=start_date, end=end_date)
    dailynav = dailynav.set_index('date')
    dailynav.index = dailynav.index.astype('datetime64[ns]')
    # Create empty fields
    dailynav['PORT_usd_pos'] = 0
    dailynav['PORT_fx_pos'] = 0
    dailynav['PORT_cash_value'] = 0
    dailynav['PORT_cash_value_fx'] = 0

    # Create a dataframe for each position's prices
    for id in tickers:
        if is_currency(id):
            if id != 'BTC':
                continue
        try:
            # Get the historical prices for this ticker
            prices = historical_prices(id, fx=fx)
            prices.index = prices.index.astype('datetime64[ns]')
            if prices.empty:
                dailynav[id + '_price'] = 0
                flash(f"Prices for ticker {id} could not be downloaded",
                      "warning")
                save_nav = False
                raise ValueError(f"Ticker {id} had download issues")

            start_date_ticker = prices.index.min()
            if start_date_ticker > start_date:
                flash(
                    f"NAV table starts on {start_date.strftime('%b-%d-%y')} "
                    f"but the ticker {id} only has pricing data from "
                    f"{start_date_ticker.strftime('%b-%d-%y')}. This may "
                    "lead to wrong calculations on past performance.",
                    "warning")

            prices = prices.rename(columns={'close_converted': id + '_price'})
            prices = prices[id + '_price']

            # Fill dailynav with prices for each ticker.
            # First check if prices is a Series. If so, convert to dataframe
            if isinstance(prices, pd.Series):
                prices = prices.to_frame()
            dailynav = pd.merge(dailynav, prices, on='date', how='left')
            # Replace NaN with previous value; if no previous value, zero
            dailynav[id + '_price'].fillna(method='backfill', inplace=True)
            dailynav[id + '_price'].fillna(method='ffill', inplace=True)

            # Now let's find trades for this ticker and include in dailynav
            tradedf = df[[
                'trade_asset_ticker', 'trade_quantity', 'cash_value_fx'
            ]]
            # Filter trades only for this ticker
            tradedf = tradedf[tradedf['trade_asset_ticker'] == id]
            # Consolidate all trades on a single date into one input
            tradedf = tradedf.groupby(level=0).sum()
            tradedf.sort_index(ascending=True, inplace=True)
            # Include a column with the cumulative sum of quantity
            tradedf['cum_quant'] = tradedf['trade_quantity'].cumsum()
            # Merge with dailynav - first rename the index to match
            tradedf.index.rename('date', inplace=True)
            # Rename columns to include the ticker name so they are
            # differentiated when merged with other ids
            tradedf.rename(columns={
                'trade_quantity': id + '_quant',
                'cum_quant': id + '_pos',
                'cash_value_fx': id + '_cash_value_fx'
            }, inplace=True)
            # Merge
            tradedf.index = tradedf.index.astype('datetime64[ns]')
            dailynav = pd.merge(dailynav, tradedf, on='date', how='left')
            # For empty days, trade quantity = 0; same for cash value
            dailynav[id + '_quant'].fillna(0, inplace=True)
            dailynav[id + '_cash_value_fx'].fillna(0, inplace=True)
            # Now, for positions, fill with previous values, NOT zero,
            # unless there's no previous value
            dailynav[id + '_pos'].fillna(method='ffill', inplace=True)
            dailynav[id + '_pos'].fillna(0, inplace=True)
            # Calculate the fx position and % of portfolio at each date
            dailynav[id + '_fx_pos'] = (
                dailynav[id + '_price'].astype(float) *
                dailynav[id + '_pos'].astype(float))
            # Before calculating NAV, clean the df of small dust positions.
            # Otherwise, a portfolio close to zero but holding 10 sats, for
            # example, would still show NAV changes
            dailynav[id + '_fx_pos'] = dailynav[id + '_fx_pos'].round(2)
        except Exception as e:
            flash(f"An error has occurred: {str(e)}", "danger")

    # Another loop to sum the portfolio values - maybe there is a way to
    # include this in the loop above, but this is not a huge time drag
    # unless there are too many tickers in a portfolio
    for id in tickers:
        if is_currency(id):
            continue
        # Include totals in new columns
        try:
            dailynav['PORT_fx_pos'] = (dailynav['PORT_fx_pos'] +
                                       dailynav[id + '_fx_pos'])
        except KeyError as e:
            save_nav = False
            flash(
                "Ticker " + id + " was not found on NAV table. " +
                "NAV calculations will be off. Error: " + str(e), "danger")
            continue
        dailynav['PORT_cash_value_fx'] = (dailynav['PORT_cash_value_fx'] +
                                          dailynav[id + '_cash_value_fx'])

    # Now that we have the full portfolio value each day, calculate alloc %
    for id in tickers:
        if is_currency(id):
            continue
        try:
            dailynav[id + "_fx_perc"] = (dailynav[id + '_fx_pos'] /
                                         dailynav['PORT_fx_pos'])
            dailynav[id + "_fx_perc"].fillna(0, inplace=True)
        except KeyError:
            continue

    # Drop duplicates
    dailynav = dailynav[~dailynav.index.duplicated(keep='first')]

    # Create a new column with the portfolio change only due to market
    # moves, discounting all cash flows for that day
    dailynav['adj_portfolio_fx'] = (dailynav['PORT_fx_pos'] -
                                    dailynav['PORT_cash_value_fx'])

    # For the period return, use the Modified Dietz rate of return method
    # (a single-period version is sketched after this function).
    # More info here: https://tinyurl.com/y474gy36
    # One caveat: if the end value is zero (i.e. the portfolio was fully
    # redeemed), the formula needs to be adjusted.
    dailynav.loc[dailynav.PORT_fx_pos > min_size_for_calc,
                 'port_dietz_ret_fx'] = (
        ((dailynav['PORT_fx_pos'] - dailynav['PORT_fx_pos'].shift(1)) -
         dailynav['PORT_cash_value_fx']) /
        (dailynav['PORT_fx_pos'].shift(1) +
         abs(dailynav['PORT_cash_value_fx'])))

    # Fill empty and NaN with zero
    dailynav['port_dietz_ret_fx'].fillna(0, inplace=True)
    dailynav['adj_port_chg_fx'] = (
        (dailynav['PORT_fx_pos'] - dailynav['PORT_fx_pos'].shift(1)) -
        dailynav['PORT_cash_value_fx'])
    # Let's fill NaN with zeros
    dailynav['adj_port_chg_fx'].fillna(0, inplace=True)

    # Calculate the metrics
    dailynav['port_perc_factor_fx'] = dailynav['port_dietz_ret_fx'] + 1
    dailynav['NAV_fx'] = dailynav['port_perc_factor_fx'].cumprod()
    dailynav['NAV_fx'] = dailynav['NAV_fx'] * 100
    dailynav['PORT_ac_CFs_fx'] = dailynav['PORT_cash_value_fx'].cumsum()
    dailynav['PORT_VALUE_BTC'] = (dailynav['PORT_fx_pos'] /
                                  dailynav['BTC_price'])

    # Save NAV locally as a pickle
    if save_nav:
        filename = "warden/" + user + FX + ".nav"
        filename = os.path.join(home_path(), filename)
        # Make sure the file path exists
        try:
            os.makedirs(os.path.dirname(filename))
        except OSError as e:
            if e.errno != 17:  # 17 = EEXIST, directory already exists
                raise
        dailynav.to_pickle(filename)

    return dailynav
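# For reference, the Modified Dietz return used above, written out for a
# single period (a sketch for clarity; flows are weighted at full value,
# matching the vectorized formula above rather than the time-weighted
# textbook variant):
def modified_dietz(begin_value, end_value, net_cash_flow):
    """Single-period Modified Dietz rate of return."""
    return ((end_value - begin_value) - net_cash_flow) / \
        (begin_value + abs(net_cash_flow))

# Example: a portfolio that starts at 100, receives a 5 deposit and ends
# at 112 earned roughly 6.7% from market moves alone:
# modified_dietz(100.0, 112.0, 5.0)  ->  0.0666...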
def find_data(ticker):
    notes = None
    last_up_source = None
    source = None
    try:
        # Parse the cryptocompare data
        price = multi_price["RAW"][ticker][fx]["PRICE"]
        # These tickers should not be requested from multi_price as there
        # are coins with the same tickers
        if ticker in ['GBTC', 'MSTR', 'TSLA', 'SQ']:
            raise KeyError
        price = float(price)
        high = float(multi_price["RAW"][ticker][fx]["HIGHDAY"])
        low = float(multi_price["RAW"][ticker][fx]["LOWDAY"])
        chg = multi_price["RAW"][ticker][fx]["CHANGEPCT24HOUR"]
        mktcap = multi_price["DISPLAY"][ticker][fx]["MKTCAP"]
        volume = multi_price["DISPLAY"][ticker][fx]["VOLUME24HOURTO"]
        last_up_source = multi_price["RAW"][ticker][fx]["LASTUPDATE"]
        source = multi_price["DISPLAY"][ticker][fx]["LASTMARKET"]
        last_update = datetime.now()
    except (KeyError, TypeError):
        # Couldn't find the price with CryptoCompare. Let's try a different
        # source and populate the data in the same format
        try:
            single_price = realtime_price(ticker)
            if single_price is None:
                raise KeyError
            price = clean_float(single_price['price'])
            last_up_source = last_update = single_price['time']
            try:
                chg = parseNumber(single_price['chg'])
            except Exception:
                chg = 0
            try:
                source = last_up_source = single_price['source']
            except Exception:
                source = last_up_source = '-'
            try:
                high = single_price['high']
                low = single_price['low']
                mktcap = volume = '-'
            except Exception:
                mktcap = high = low = volume = '-'
        except Exception:
            try:
                # Finally, if the realtime price is unavailable, find the
                # latest saved value in historical prices
                price_class = historical_prices(ticker, fx)
                if price_class is None:
                    raise KeyError
                price = clean_float(
                    price_class.df['close_converted'].iloc[0])
                high = '-'
                low = '-'
                volume = '-'
                mktcap = chg = 0
                source = last_up_source = 'Historical Data'
                last_update = price_class.df.index[0]
            except Exception as e:
                price = high = low = chg = mktcap = last_up_source = last_update = volume = 0
                source = '-'
                logging.error(
                    f"There was an error getting the price for {ticker}. " +
                    f"Error: {e}")

    if ticker.upper() == 'BTC':
        nonlocal btc_price
        btc_price = price

    # Check that the 24hr change really covers the last 24 hours;
    # if the data is older than that, set the 24hr change to 0
    try:
        checker = last_update
        if not isinstance(checker, datetime):
            checker = parser.parse(last_update)
        if checker < (datetime.now() - timedelta(days=1)):
            chg = 0
    except Exception:
        pass

    return (price, last_update, high, low, chg, mktcap, last_up_source,
            volume, source, notes)
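# The staleness guard at the end of find_data, extracted as a sketch
# (hypothetical helper; dateutil's parser is assumed, as in the code above):
from datetime import datetime, timedelta
from dateutil import parser

def is_stale(last_update, max_age=timedelta(days=1)) -> bool:
    """Return True when a quote's timestamp is older than max_age."""
    if not isinstance(last_update, datetime):
        last_update = parser.parse(last_update)
    return last_update < (datetime.now() - max_age)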