def call_grufity(self, _):
    """Open the Grufity page for the loaded ticker in the default browser."""
    url = f"https://grufity.com/stock/{self.ticker}"
    webbrowser.open(url)
    console.print("")
def get_candles_dataframe(
    instrument: Union[str, None] = None,
    granularity: str = "D",
    candlecount: int = 180,
) -> Union[pd.DataFrame, bool]:
    """Request data for candle chart.

    Parameters
    ----------
    instrument : str
        Loaded currency pair code
    granularity : str, optional
        Data granularity, by default "D"
    candlecount : int, optional
        Limit for the number of data points, by default 180

    Returns
    -------
    Union[pd.DataFrame, bool]
        Candle chart data or False
    """
    if instrument is None:
        console.print(
            "Error: An instrument should be loaded before running this command."
        )
        return False
    parameters = {
        "granularity": granularity,
        "count": candlecount,
    }
    if client is None:
        return False
    try:
        request = instruments.InstrumentsCandles(instrument, params=parameters)
        response = client.request(request)
        # Build one row per candle; bind each candle once instead of
        # re-indexing response["candles"][i] six times per row.
        candles_data = [
            {
                # Oanda timestamps are RFC3339; keep "YYYY-MM-DD HH:MM:SS".
                "Date": candle["time"][:10] + " " + candle["time"][11:19],
                "Open": float(candle["mid"]["o"]),
                "High": float(candle["mid"]["h"]),
                "Low": float(candle["mid"]["l"]),
                "Close": float(candle["mid"]["c"]),
                "Volume": candle["volume"],
            }
            for candle in response["candles"]
        ]
        if not candles_data:
            return pd.DataFrame()
        df_candles = pd.DataFrame(candles_data)
        df_candles.set_index("Date", inplace=True)
        df_candles.index = pd.to_datetime(df_candles.index)
        return df_candles
    except V20Error as e:
        # Oanda packs a JSON error payload in the exception message.
        d_error = json.loads(e.msg)
        console.print(d_error["errorMessage"], "\n")
        return False
def account_summary_request(
        accountID: str = account) -> Union[pd.DataFrame, bool]:
    """Request Oanda account summary.

    Parameters
    ----------
    accountID : str, optional
        Oanda account ID, by default cfg.OANDA_ACCOUNT

    Returns
    -------
    Union[pd.DataFrame, bool]
        Account summary data or False
    """
    if accountID == "REPLACE_ME":
        console.print("Error: Oanda account credentials are required.")
        return False
    if client is None:
        return False
    try:
        request = accounts.AccountSummary(accountID=accountID)
        response = client.request(request)
        # Flatten the account payload into (label, value) rows.
        acct = response["account"]
        rows = [
            ("Balance", acct["balance"]),
            ("NAV", acct["NAV"]),
            ("Unrealized P/L", acct["unrealizedPL"]),
            ("Total P/L", acct["pl"]),
            ("Open Trade Count", acct["openTradeCount"]),
            ("Margin Available", acct["marginAvailable"]),
            ("Margin Used", acct["marginUsed"]),
            ("Margin Closeout", acct["marginCloseoutNAV"]),
            ("Margin Closeout Percent", acct["marginCloseoutPercent"]),
            ("Margin Closeout Position Value", acct["marginCloseoutPositionValue"]),
        ]
        return pd.DataFrame(rows, columns=["Type", "Value"])
    except V20Error as e:
        d_error = json.loads(e.msg)
        console.print(d_error["errorMessage"], "\n")
        return False
def menu(self, custom_path_menu_above: str = ""):
    """Run this controller's command loop.

    Repeatedly consumes commands from ``self.queue`` (or prompts the user
    when the queue is empty) and dispatches them through ``self.switch``.
    Returns the remaining queue when the user quits this menu.

    Parameters
    ----------
    custom_path_menu_above : str
        Menu path to re-enter after quitting, used when this menu was
        reached through an indirect jump rather than from its parent.
    """
    an_input = "HELP_ME"
    while True:
        # There is a command in the queue
        if self.queue and len(self.queue) > 0:
            # If the command is quitting the menu we want to return in here
            if self.queue[0] in ("q", "..", "quit"):
                # Go back to the root in order to go to the right directory because
                # there was a jump between indirect menus
                if custom_path_menu_above:
                    self.queue.insert(1, custom_path_menu_above)
                if len(self.queue) > 1:
                    return self.queue[1:]
                if gtff.ENABLE_EXIT_AUTO_HELP:
                    return ["help"]
                return []

            # Consume 1 element from the queue
            an_input = self.queue[0]
            self.queue = self.queue[1:]

            # Print location because this was an instruction and we want user to know the action
            if (an_input and an_input != "home"
                    and an_input.split(" ")[0] in self.controller_choices):
                console.print(f"{get_flair()} {self.PATH} $ {an_input}")

        # Get input command from user
        else:
            # Display help menu when entering on this menu from a level above
            if an_input == "HELP_ME":
                self.print_help()

            try:
                # Get input from user using auto-completion
                if session and gtff.USE_PROMPT_TOOLKIT:
                    an_input = session.prompt(
                        f"{get_flair()} {self.PATH} $ ",
                        completer=self.completer,
                        search_ignore_case=True,
                    )
                # Get input from user without auto-completion
                else:
                    an_input = input(f"{get_flair()} {self.PATH} $ ")
            except KeyboardInterrupt:
                # Exit in case of keyboard interrupt
                an_input = "exit"

        try:
            # Process the input command
            self.queue = self.switch(an_input)
        except SystemExit:
            # switch() raises SystemExit for unknown commands; suggest the
            # closest known command instead of crashing the loop.
            console.print(
                f"\nThe command '{an_input}' doesn't exist on the {self.PATH} menu.",
                end="",
            )
            similar_cmd = difflib.get_close_matches(
                an_input.split(" ")[0] if " " in an_input else an_input,
                self.controller_choices,
                n=1,
                cutoff=0.7,
            )
            if similar_cmd:
                if " " in an_input:
                    candidate_input = (
                        f"{similar_cmd[0]} {' '.join(an_input.split(' ')[1:])}"
                    )
                    # The suggestion is identical to what just failed:
                    # drop it and reset the queue rather than loop forever.
                    if candidate_input == an_input:
                        an_input = ""
                        self.queue = []
                        console.print("\n")
                        continue
                    an_input = candidate_input
                else:
                    an_input = similar_cmd[0]
                console.print(f" Replacing by '{an_input}'.")
                self.queue.insert(0, an_input)
            else:
                console.print("\n")
def call_load(self, other_args):
    """Process load command.

    Parses the user's arguments and loads a crypto currency (price history
    plus metadata) from the selected source onto the controller state.
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        prog="load",
        description="Load crypto currency to perform analysis on."
        "Available data sources are CoinGecko, CoinPaprika, Binance, Coinbase"
        "By default main source used for analysis is CoinGecko (cg). To change it use --source flag",
    )
    parser.add_argument(
        "-c",
        "--coin",
        help="Coin to get",
        dest="coin",
        type=str,
        required="-h" not in other_args,
    )
    parser.add_argument(
        "--source",
        help="Source of data",
        dest="source",
        choices=("cp", "cg", "bin", "cb"),
        default="cg",
        required=False,
    )
    parser.add_argument(
        "-s",
        "--start",
        type=valid_date_in_past,
        default=(datetime.now() - timedelta(days=366)).strftime("%Y-%m-%d"),
        dest="start",
        help="The starting date (format YYYY-MM-DD) of the crypto",
    )
    parser.add_argument(
        "--vs",
        help="Quote currency (what to view coin vs)",
        dest="vs",
        default="usd",
        type=str,
    )
    parser.add_argument(
        "-i",
        "--interval",
        help="Interval to get data (Only available on binance/coinbase)",
        dest="interval",
        default="1day",
        type=str,
    )
    # Allow "load btc" shorthand: treat the first bare token as the coin.
    if other_args and "-" not in other_args[0][0]:
        other_args.insert(0, "-c")
    ns_parser = parse_known_args_and_warn(parser, other_args)
    if ns_parser:
        # BUG FIX: delta was previously computed from ns_parser.start
        # *before* the None-check above, raising AttributeError whenever
        # parsing failed (e.g. on "-h").
        delta = (datetime.now() - ns_parser.start).days
        source = ns_parser.source
        for arg in ["--source", source]:
            if arg in other_args:
                other_args.remove(arg)
        # NOTE(review): the parser defines no "resolution" argument —
        # presumably parse_known_args_and_warn injects it; confirm.
        res = ns_parser.resolution if delta < 90 else "1D"
        self.resolution = res
        # TODO: protections in case None is returned
        (
            self.coin,
            self.source,
            self.symbol,
            self.coin_map_df,
            self.current_df,
            self.current_currency,
        ) = cryptocurrency_helpers.load(
            coin=ns_parser.coin,
            source=ns_parser.source,
            should_load_ta_data=True,
            days=delta,
            interval=ns_parser.interval,
            vs=ns_parser.vs,
        )
        if self.symbol:
            self.current_interval = ns_parser.interval
            first_price = self.current_df["Close"].iloc[0]
            last_price = self.current_df["Close"].iloc[-1]
            second_last_price = self.current_df["Close"].iloc[-2]
            interval_change = calc_change(last_price, second_last_price)
            since_start_change = calc_change(last_price, first_price)
            if isinstance(self.current_currency, str) and self.PATH == "/crypto/":
                col = "green" if interval_change > 0 else "red"
                self.price_str = f"""Current Price: {round(last_price,2)} {self.current_currency.upper()}
Performance in interval ({self.current_interval}): [{col}]{round(interval_change,2)}%[/{col}]
Performance since {ns_parser.start.strftime('%Y-%m-%d')}: [{col}]{round(since_start_change,2)}%[/{col}]"""  # noqa
                console.print(f"""
Loaded {self.coin} against {self.current_currency} from {CRYPTO_SOURCES[self.source]} source

{self.price_str}
""")  # noqa
            else:
                console.print(
                    f"{delta} Days of {self.coin} vs {self.current_currency} loaded with {res} resolution.\n"
                )
def display_line(
    data: pd.Series,
    title: str = "",
    log_y: bool = True,
    draw: bool = False,
    markers_lines: Optional[List[datetime]] = None,
    markers_scatter: Optional[List[datetime]] = None,
    export: str = "",
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Display line plot of data

    Parameters
    ----------
    data: pd.Series
        Data to plot
    title: str
        Title for plot
    log_y: bool
        Flag for showing y on log scale
    draw: bool
        Flag for drawing lines and annotating on the plot
    markers_lines: Optional[List[datetime]]
        List of dates to highlight using vertical lines
    markers_scatter: Optional[List[datetime]]
        List of dates to highlight using scatter
    export: str
        Format to export data
    external_axes : Optional[List[plt.Axes]], optional
        External axes (1 axis is expected in the list), by default None
    """
    # This plot has 1 axis
    if external_axes is None:
        _, ax = plt.subplots(
            figsize=plot_autoscale(),
            dpi=PLOT_DPI,
        )
    else:
        if len(external_axes) != 1:
            logger.error("Expected list of one axis item.")
            # BUG FIX: message previously ended with the literal "/n"
            # instead of a newline escape.
            console.print("[red]Expected list of 1 axis items.\n[/red]")
            return
        (ax,) = external_axes

    if log_y:
        ax.semilogy(data.index, data.values)
        ax.yaxis.set_major_formatter(matplotlib.ticker.ScalarFormatter())
        ax.yaxis.set_major_locator(
            matplotlib.ticker.LogLocator(base=100, subs=[1.0, 2.0, 5.0, 10.0])
        )
        ax.ticklabel_format(style="plain", axis="y")
    else:
        ax.plot(data.index, data.values)

    if markers_lines:
        ymin, ymax = ax.get_ylim()
        ax.vlines(markers_lines, ymin, ymax, color="#00AAFF")

    if markers_scatter:
        for n, marker_date in enumerate(markers_scatter):
            price_location_idx = data.index.get_loc(marker_date, method="nearest")
            # algo to improve text placement of highlight event number:
            # put the label below the point when it sits in a local dip.
            if (
                0 < price_location_idx < (len(data) - 1)
                and data.iloc[price_location_idx - 1] > data.iloc[price_location_idx]
                and data.iloc[price_location_idx + 1] > data.iloc[price_location_idx]
            ):
                text_loc = (0, -20)
            else:
                text_loc = (0, 10)
            ax.annotate(
                str(n + 1),
                (mdates.date2num(marker_date), data.iloc[price_location_idx]),
                xytext=text_loc,
                textcoords="offset points",
            )
            ax.scatter(
                marker_date,
                data.iloc[price_location_idx],
                color="#00AAFF",
                s=100,
            )

    data_type = data.name
    ax.set_ylabel(data_type)
    ax.set_xlim(data.index[0], data.index[-1])
    ax.ticklabel_format(style="plain", axis="y")
    ax.get_yaxis().set_major_formatter(
        matplotlib.ticker.FuncFormatter(lambda x, _: lambda_long_number_format(x))
    )
    if title:
        ax.set_title(title)
    if draw:
        LineAnnotateDrawer(ax).draw_lines_and_annotate()
    theme.style_primary_axis(ax)
    if external_axes is None:
        theme.visualize_output()

    export_data(
        export,
        os.path.dirname(os.path.abspath(__file__)).replace("common", "stocks"),
        "line",
    )
def call_quit(self, _) -> None:
    """Leave the current menu by queueing a single 'quit' step."""
    self.save_class()
    console.print("")
    # Prepend so the quit runs before anything already queued.
    self.queue[:0] = ["quit"]
def call_macroaxis(self, _):
    """Open the Macroaxis page for the loaded ticker in the default browser."""
    url = f"https://www.macroaxis.com/invest/market/{self.ticker}"
    webbrowser.open(url)
    console.print("")
def call_add(self, other_args: List[str]):
    """Process add command.

    Interactively collects a trade (or cash movement) from the user and
    appends it to the portfolio, initializing the portfolio on first use.
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        prog="add",
        description="Adds an item to your portfolio",
    )
    ns_parser = parse_known_args_and_warn(parser, other_args)
    if not ns_parser:
        return
    console.print()
    inputs: Dict[str, Union[str, float, int]] = {}

    # Asset type: give the user one retry before bailing out.
    type_ = input("Type (stock, cash): \n")
    if type_ not in ["stock", "cash"]:
        logger.warning("Currently only stocks or cash supported.")
        console.print("[red]Currently only stocks or cash supported.[/red]\n")
        type_ = input("Type (stock, cash): \n")
        if type_ not in ["stock", "cash"]:
            logger.error("Two unsuccessful attempts. Exiting add")
            console.print("[red]Two unsuccessful attempts. Exiting add.[/red]\n")
            return
    inputs["Type"] = type_.lower()

    action = input("Action: (buy, sell, deposit, withdraw): \n").lower()
    if type_ == "cash":
        if action not in ["deposit", "withdraw"]:
            console.print("Cash can only be deposit or withdraw\n")
            action = input("Action: (buy, sell, deposit, withdraw): \n").lower()
            if action not in ["deposit", "withdraw"]:
                logger.error("Two unsuccessful attempts. Exiting add")
                console.print(
                    "[red]Two unsuccessful attempts. Exiting add.[/red]\n"
                )
                return
    elif type_ == "stock":
        if action not in ["buy", "sell"]:
            console.print("Stock can only be buy or sell\n")
            # BUG FIX: the stock branch previously re-checked the same
            # stale `action` without re-prompting, so the retry (which the
            # cash branch offers) could never succeed.
            action = input("Action: (buy, sell, deposit, withdraw): \n").lower()
            if action not in ["buy", "sell"]:
                logger.error("Two unsuccessful attempts. Exiting add")
                console.print(
                    "[red]Two unsuccessful attempts. Exiting add.[/red]\n"
                )
                return
    inputs["Side"] = action.lower()

    inputs["Name"] = input("Name (ticker or cash [if depositing cash]):\n")
    inputs["Date"] = valid_date(
        input("Purchase date (YYYY-MM-DD): \n")).strftime("%Y-%m-%d")
    inputs["Quantity"] = float(input("Quantity: \n"))
    inputs["Price"] = float(input("Price per share: \n"))
    inputs["Fees"] = float(input("Fees: \n"))
    inputs["Premium"] = ""

    if self.portfolio.empty:
        self.portfolio = portfolio_model.Portfolio(
            pd.DataFrame.from_dict(inputs, orient="index").T)
        # BUG FIX: typo "initiialized" -> "initialized".
        console.print(
            f"Portfolio successfully initialized with {inputs['Name']}.\n"
        )
        return
    self.portfolio.add_trade(inputs)
    console.print(f"{inputs['Name']} successfully added\n")
def call_newsfilter(self, _):
    """Open a Newsfilter search for the loaded ticker in the default browser."""
    url = f"https://newsfilter.io/search?query={self.ticker}"
    webbrowser.open(url)
    console.print("")
def call_stockanalysis(self, _):
    """Open the StockAnalysis page for the loaded ticker in the default browser."""
    url = f"https://stockanalysis.com/stocks/{self.ticker}/"
    webbrowser.open(url)
    console.print("")
def call_macrotrends(self, _):
    """Open the Macrotrends market-cap chart for the loaded ticker."""
    url = f"https://www.macrotrends.net/stocks/charts/{self.ticker}/{self.ticker}/market-cap"
    webbrowser.open(url)
    console.print("")
def call_zacks(self, _):
    """Open the Zacks quote page for the loaded ticker in the default browser."""
    url = f"https://www.zacks.com/stock/quote/{self.ticker}"
    webbrowser.open(url)
    console.print("")
def call_fintel(self, _):
    """Open the Fintel page for the loaded ticker in the default browser."""
    url = f"https://fintel.io/s/us/{self.ticker}"
    webbrowser.open(url)
    console.print("")
def display_seasonal(
    name: str,
    df: pd.DataFrame,
    target: str,
    multiplicative: bool = False,
    export: str = "",
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Display seasonal decomposition data

    Parameters
    ----------
    name : str
        Name of dataset
    df : pd.DataFrame
        DataFrame
    target : str
        Column of data to look at
    multiplicative : bool
        Boolean to indicate multiplication instead of addition
    export : str
        Format to export trend and cycle df
    external_axes : Optional[List[plt.Axes]], optional
        External axes (4 axes are expected in the list), by default None
    """
    data = df[target]
    result, cycle, trend = qa_model.get_seasonal_decomposition(
        data, multiplicative)
    # Merge the raw series with all decomposition components on the index,
    # suffixing duplicate column names so each component stays addressable.
    plot_data = pd.merge(
        data,
        result.trend,
        how="outer",
        left_index=True,
        right_index=True,
        suffixes=("", "_result.trend"),
    )
    plot_data = pd.merge(
        plot_data,
        result.seasonal,
        how="outer",
        left_index=True,
        right_index=True,
        suffixes=("", "_result.seasonal"),
    )
    plot_data = pd.merge(
        plot_data,
        result.resid,
        how="outer",
        left_index=True,
        right_index=True,
        suffixes=("", "_result.resid"),
    )
    plot_data = pd.merge(
        plot_data,
        cycle,
        how="outer",
        left_index=True,
        right_index=True,
        suffixes=("", "_cycle"),
    )
    plot_data = pd.merge(
        plot_data,
        trend,
        how="outer",
        left_index=True,
        right_index=True,
        suffixes=("", "_trend"),
    )
    plot_data = reindex_dates(plot_data)

    # This plot has 4 axes (BUG FIX: comment previously claimed 1 axis)
    if external_axes is None:
        fig, axes = plt.subplots(
            4,
            1,
            sharex=True,
            figsize=plot_autoscale(),
            dpi=PLOT_DPI,
        )
        (ax1, ax2, ax3, ax4) = axes
    else:
        if len(external_axes) != 4:
            logger.error("Expected list of four axis items.")
            # BUG FIX: "/n" -> "\n"
            console.print("[red]Expected list of 4 axis items.\n[/red]")
            return
        (ax1, ax2, ax3, ax4) = external_axes

    colors = iter(theme.get_colors())
    ax1.set_title(f"{name} (Time-Series) {target} seasonal decomposition")
    ax1.plot(plot_data.index, plot_data[target].values,
             color=next(colors), label="Values")
    ax1.set_xlim([plot_data.index[0], plot_data.index[-1]])
    ax1.legend()
    ax2.plot(plot_data["trend"], color=theme.down_color, label="Cyclic-Trend")
    ax2.plot(
        plot_data["trend_cycle"],
        color=theme.up_color,
        linestyle="--",
        label="Cycle component",
    )
    ax2.legend()
    ax3.plot(plot_data["trend_trend"], color=next(colors),
             label="Trend component")
    ax3.plot(plot_data["seasonal"], color=next(colors), label="Seasonal effect")
    ax3.legend()
    ax4.plot(plot_data["resid"], color=next(colors), label="Residuals")
    ax4.legend()
    theme.style_primary_axis(ax1)
    theme.style_primary_axis(ax2)
    theme.style_primary_axis(ax3)
    theme.style_primary_axis(
        ax4,
        data_index=plot_data.index.to_list(),
        tick_labels=plot_data["date"].to_list(),
    )
    if external_axes is None:
        fig.tight_layout(pad=theme.tight_layout_padding)
        fig.subplots_adjust(hspace=0.1, )
        theme.visualize_output(force_tight_layout=False)

    # From https://otexts.com/fpp2/seasonal-strength.html
    console.print("Time-Series Level is " + str(round(data.mean(), 2)))
    # BUG FIX: the max() previously closed too early —
    # max(0, 1 - var(resid)) / var(trend + resid) — which is not the fpp2
    # strength-of-trend formula; parenthesize like Fs below.
    Ft = max(0, 1 - np.var(result.resid) / np.var(result.trend + result.resid))
    console.print(f"Strength of Trend: {Ft:.4f}")
    Fs = max(
        0,
        1 - np.var(result.resid) / np.var(result.seasonal + result.resid),
    )
    console.print(f"Strength of Seasonality: {Fs:.4f}\n")
    export_data(
        export,
        os.path.dirname(os.path.abspath(__file__)).replace("common", "stocks"),
        "summary",
        cycle.join(trend),
    )
def call_build(self, other_args: List[str]):
    """Process build command.

    Builds a portfolio from user-supplied tickers, asset classes and
    weights, allocating an initial amount across them.
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        prog="build",
        description="Build portfolio from list of tickers and weights",
    )
    parser.add_argument(
        "-s",
        "--start",
        help="Start date.",
        dest="start",
        default="2021-01-04",
        type=valid_date,
        required="-h" not in other_args,
    )
    parser.add_argument(
        "-t",
        "--tickers",
        type=str,
        help="List of symbols separated by commas (i.e AAPL,BTC,DOGE,SPY....)",
        dest="tickers",
        required="-h" not in other_args,
    )
    parser.add_argument(
        "-c",
        "--class",
        help="Asset class (stock, crypto, etf), separated by commas.",
        dest="classes",
        type=str,
        required="-h" not in other_args,
    )
    parser.add_argument(
        "-w",
        "--weights",
        help="List of weights, separated by comma",
        type=str,
        dest="weights",
        required="-h" not in other_args,
    )
    parser.add_argument(
        "-a",
        "--amount",
        help="Amount to allocate initially.",
        dest="amount",
        default=100_000,
        type=check_positive,
    )
    ns_parser = parse_known_args_and_warn(parser, other_args)
    if not ns_parser:
        return
    # Split the comma-separated CLI strings into their typed lists.
    symbols = ns_parser.tickers.split(",")
    asset_classes = ns_parser.classes.split(",")
    portfolio_weights = list(map(float, ns_parser.weights.split(",")))
    self.portfolio = portfolio_model.Portfolio.from_custom_inputs_and_weights(
        start_date=ns_parser.start.strftime("%Y-%m-%d"),
        list_of_symbols=symbols,
        list_of_weights=portfolio_weights,
        list_of_types=asset_classes,
        amount=ns_parser.amount,
    )
    console.print()
def display_hist(
    name: str,
    df: pd.DataFrame,
    target: str,
    bins: int,
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Generate of histogram of data

    Parameters
    ----------
    name : str
        Name of dataset
    df : pd.DataFrame
        Dataframe to look at
    target : str
        Data column to get histogram of the dataframe
    bins : int
        Number of bins in histogram
    external_axes : Optional[List[plt.Axes]], optional
        External axes (1 axis is expected in the list), by default None
    """
    data = df[target]

    # This plot has 1 axis
    if external_axes is None:
        _, ax = plt.subplots(
            figsize=plot_autoscale(),
            dpi=PLOT_DPI,
        )
    else:
        if len(external_axes) != 1:
            logger.error("Expected list of one axis item.")
            # BUG FIX: "/n" -> "\n"
            console.print("[red]Expected list of 1 axis items.\n[/red]")
            return
        (ax,) = external_axes

    sns.histplot(
        data,
        color=theme.up_color,
        bins=bins,
        kde=True,
        ax=ax,
        stat="proportion",
        legend=True,
    )
    sns.rugplot(data, color=theme.down_color, ax=ax, legend=True)

    # Only include a start date in the title when the index is datetime.
    if isinstance(df.index[0], datetime):
        start = df.index[0]
        ax.set_title(
            f"Histogram of {name} {target} from {start.strftime('%Y-%m-%d')}")
    else:
        ax.set_title(f"Histogram of {name} {target}")
    ax.set_xlabel("Value")
    theme.style_primary_axis(ax)

    # Manually construct the chart legend
    proportion_legend = mpatches.Patch(color=theme.up_color,
                                       label="Univariate distribution")
    marginal_legend = mpatches.Patch(color=theme.down_color,
                                     label="Marginal distributions")
    ax.legend(handles=[proportion_legend, marginal_legend])
    if external_axes is None:
        theme.visualize_output()
def call_var(self, other_args: List[str]):
    """Process var command.

    Computes the Value at Risk (VaR) of the loaded portfolio's returns,
    optionally mean-centered, Cornish-Fisher adjusted, or Student-t based.
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        prog="var",
        description="""
            Provides value at risk (short: VaR) of the selected portfolio.
        """,
    )
    parser.add_argument(
        "-m",
        "--mean",
        action="store_true",
        default=False,
        dest="use_mean",
        help="If one should use the mean of the portfolio return",
    )
    parser.add_argument(
        "-a",
        "--adjusted",
        action="store_true",
        default=False,
        dest="adjusted",
        help="""
            If the VaR should be adjusted for skew and kurtosis (Cornish-Fisher-Expansion)
        """,
    )
    parser.add_argument(
        "-s",
        "--student",
        action="store_true",
        default=False,
        dest="student_t",
        help="""
            If one should use the student-t distribution
        """,
    )
    parser.add_argument(
        "-p",
        "--percentile",
        action="store",
        dest="percentile",
        type=float,
        default=99.9,
        help="""
            Percentile used for VaR calculations, for example input 99.9 equals a 99.9 Percent VaR
        """,
    )
    ns_parser = parse_known_args_and_warn(parser, other_args)
    # Guard clauses instead of nested conditionals.
    if not ns_parser:
        return
    if self.portfolio.empty:
        console.print("[red]No portfolio loaded.[/red]\n")
        return
    if ns_parser.adjusted and ns_parser.student_t:
        console.print("Select the adjusted or the student_t parameter.\n")
        return
    from gamestonk_terminal.common.quantitative_analysis import qa_view
    self.portfolio.generate_holdings_from_trades()
    qa_view.display_var(
        self.portfolio.returns,
        "Portfolio",
        ns_parser.use_mean,
        ns_parser.adjusted,
        ns_parser.student_t,
        ns_parser.percentile / 100,
        True,
    )
def call_home(self, _) -> None:
    """Return to the terminal root by queueing one 'quit' per menu level."""
    self.save_class()
    console.print("")
    # Depth is the number of path separators minus the root itself.
    depth = self.PATH.count("/") - 1
    self.queue[:0] = ["quit"] * depth
def display_k_nearest_neighbors(
    ticker: str,
    data: Union[pd.DataFrame, pd.Series],
    n_neighbors: int,
    n_input_days: int,
    n_predict_days: int,
    test_size: float,
    end_date: str = "",
    no_shuffle: bool = True,
    time_res: str = "",
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Display predictions using knn

    Parameters
    ----------
    ticker : str
        Ticker the data belongs to (used in the plot title)
    data : Union[pd.DataFrame, pd.Series]
        Data to use for ML
    n_neighbors : int
        Number of neighbors for knn
    n_input_days : int
        Length of input sequences
    n_predict_days : int
        Number of days to predict
    test_size : float
        Fraction of data for testing
    end_date : str, optional
        End date for backtesting, by default ""
    no_shuffle : bool, optional
        Flag to *not* shuffle the data before the train/test split,
        by default True
    time_res : str
        Resolution for data, allowing for predicting outside of standard market days
    external_axes : Optional[List[plt.Axes]], optional
        External axes (1 axis is expected in the list), by default None
    """
    (
        forecast_data_df,
        preds,
        y_valid,
        y_dates_valid,
        scaler,
    ) = knn_model.get_knn_model_data(
        data, n_input_days, n_predict_days, n_neighbors, test_size, end_date, no_shuffle
    )
    if forecast_data_df.empty:
        console.print("Issue performing data prep and prediction")
        return

    if time_res:
        # Re-stamp the forecast index at the requested resolution, starting
        # right after the last observed point (drop the seed timestamp).
        forecast_data_df.index = pd.date_range(
            data.index[-1], periods=n_predict_days + 1, freq=time_res
        )[1:]
    print_pretty_prediction(forecast_data_df[0], data.values[-1])
    plot_data_predictions(
        data=data,
        preds=preds,
        y_valid=y_valid,
        y_dates_valid=y_dates_valid,
        scaler=scaler,
        title=f"KNN Model with {n_neighbors} Neighbors on {ticker}",
        forecast_data=forecast_data_df,
        n_loops=1,
        time_str=time_res,
        external_axes=external_axes,
    )
    console.print("")
def call_exit(self, _) -> None:  # Not sure how to handle controller loading here
    """Exit the terminal entirely by queueing a 'quit' for every open menu."""
    console.print("")
    depth = self.PATH.count("/")
    self.queue[:0] = ["quit"] * depth
def display_cdf(
    name: str,
    df: pd.DataFrame,
    target: str,
    export: str = "",
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Plot Cumulative Distribution Function

    Parameters
    ----------
    name : str
        Name of dataset
    df : pd.DataFrame
        Dataframe to look at
    target : str
        Data column
    export : str
        Format to export data
    external_axes : Optional[List[plt.Axes]], optional
        External axes (1 axis is expected in the list), by default None
    """
    data = df[target]
    start = df.index[0]
    # Empirical CDF: sorted value frequencies accumulated to probabilities.
    cdf = data.value_counts().sort_index().div(len(data)).cumsum()

    # This plot has 1 axis
    if external_axes is None:
        _, ax = plt.subplots(
            figsize=plot_autoscale(),
            dpi=PLOT_DPI,
        )
    else:
        if len(external_axes) != 1:
            logger.error("Expected list of one axis item.")
            # BUG FIX: "/n" -> "\n"
            console.print("[red]Expected list of 1 axis items.\n[/red]")
            return
        (ax,) = external_axes

    cdf.plot(ax=ax)
    ax.set_title(
        f"Cumulative Distribution Function of {name} {target}\nfrom {start.strftime('%Y-%m-%d')}"
    )
    ax.set_ylabel("Probability")
    ax.set_xlabel(target)
    minVal = data.values.min()
    q25 = np.quantile(data.values, 0.25)
    medianVal = np.quantile(data.values, 0.5)
    q75 = np.quantile(data.values, 0.75)
    # (x-pair, y-pair, color) triples for ax.plot: horizontal and vertical
    # dashed guides marking Q1, the median, and Q3 on the CDF.
    labels = [
        (minVal, q25),
        (0.25, 0.25),
        theme.down_color,
        (q25, q25),
        (0, 0.25),
        theme.down_color,
        (minVal, medianVal),
        (0.5, 0.5),
        theme.down_color,
        (medianVal, medianVal),
        (0, 0.5),
        theme.down_color,
        (minVal, q75),
        (0.75, 0.75),
        theme.down_color,
        (q75, q75),
        (0, 0.75),
        theme.down_color,
    ]
    ax.plot(*labels, ls="--")
    ax.text(
        minVal + (q25 - minVal) / 2,
        0.27,
        "Q1",
        color=theme.down_color,
        fontweight="bold",
    )
    ax.text(
        minVal + (medianVal - minVal) / 2,
        0.52,
        "Median",
        color=theme.down_color,
        fontweight="bold",
    )
    ax.text(
        minVal + (q75 - minVal) / 2,
        0.77,
        "Q3",
        color=theme.down_color,
        fontweight="bold",
    )
    ax.set_xlim(cdf.index[0], cdf.index[-1])
    theme.style_primary_axis(ax)
    if external_axes is None:
        theme.visualize_output()

    export_data(
        export,
        os.path.dirname(os.path.abspath(__file__)).replace("common", "stocks"),
        "cdf",
        pd.DataFrame(cdf),
    )
def call_load(self, other_args: List[str]):
    """Process load command.

    Parses the user's arguments, fetches the stock's price history from the
    selected source, and stores it (plus derived columns for the qa/pred
    menus) on the controller state.
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        prog="load",
        # BUG FIX: help text previously read "is syf'," — the source name
        # is 'yf' (Yahoo Finance).
        description=(
            "Load stock ticker to perform analysis on. When the data source"
            " is 'yf', an Indian ticker can be"
            " loaded by using '.NS' at the end, e.g. 'SBIN.NS'. See available market in"
            " https://help.yahoo.com/kb/exchanges-data-providers-yahoo-finance-sln2310.html."
        ),
    )
    parser.add_argument(
        "-t",
        "--ticker",
        action="store",
        dest="ticker",
        required="-h" not in other_args,
        help="Stock ticker",
    )
    parser.add_argument(
        "-s",
        "--start",
        type=valid_date,
        default=(datetime.now() - timedelta(days=1100)).strftime("%Y-%m-%d"),
        dest="start",
        help="The starting date (format YYYY-MM-DD) of the stock",
    )
    parser.add_argument(
        "-e",
        "--end",
        type=valid_date,
        default=datetime.now().strftime("%Y-%m-%d"),
        dest="end",
        help="The ending date (format YYYY-MM-DD) of the stock",
    )
    parser.add_argument(
        "-i",
        "--interval",
        action="store",
        dest="interval",
        type=int,
        default=1440,
        choices=[1, 5, 15, 30, 60],
        help="Intraday stock minutes",
    )
    parser.add_argument(
        "--source",
        action="store",
        dest="source",
        # Intraday data is only available from Yahoo Finance.
        choices=["yf", "av", "iex"] if "-i" not in other_args else ["yf"],
        default="yf",
        help="Source of historical data.",
    )
    parser.add_argument(
        "-p",
        "--prepost",
        action="store_true",
        default=False,
        dest="prepost",
        help="Pre/After market hours. Only works for 'yf' source, and intraday data",
    )
    parser.add_argument(
        "-r",
        "--iexrange",
        dest="iexrange",
        help="Range for using the iexcloud api. Note that longer range requires more tokens in account",
        choices=["ytd", "1y", "2y", "5y", "6m"],
        type=str,
        default="ytd",
    )
    # Allow "load AAPL" shorthand: treat the first bare token as the ticker.
    if other_args and "-" not in other_args[0][0]:
        other_args.insert(0, "-t")
    ns_parser = parse_known_args_and_warn(parser, other_args)
    if ns_parser:
        df_stock_candidate = stocks_helper.load(
            ns_parser.ticker,
            ns_parser.start,
            ns_parser.interval,
            ns_parser.end,
            ns_parser.prepost,
            ns_parser.source,
        )
        if not df_stock_candidate.empty:
            self.stock = df_stock_candidate
            self.add_info = stocks_helper.additional_info_about_ticker(
                ns_parser.ticker)
            console.print(self.add_info)
            # Split an exchange suffix (e.g. "SBIN.NS") off the ticker.
            if "." in ns_parser.ticker:
                self.ticker, self.suffix = ns_parser.ticker.upper().split(".")
            else:
                self.ticker = ns_parser.ticker.upper()
                self.suffix = ""
            if ns_parser.source == "iex":
                self.start = self.stock.index[0].strftime("%Y-%m-%d")
            else:
                self.start = ns_parser.start
            self.interval = f"{ns_parser.interval}min"
            # qa/pred menus expect lowercase columns and derived returns.
            if self.PATH in ["/stocks/qa/", "/stocks/pred/"]:
                self.stock["Returns"] = self.stock["Adj Close"].pct_change()
                self.stock["LogRet"] = np.log(
                    self.stock["Adj Close"]) - np.log(
                        self.stock["Adj Close"].shift(1))
                self.stock["LogPrice"] = np.log(self.stock["Adj Close"])
                self.stock = self.stock.rename(
                    columns={"Adj Close": "AdjClose"})
                self.stock = self.stock.dropna()
                self.stock.columns = [x.lower() for x in self.stock.columns]
        console.print("")
def display_bw(
    name: str,
    df: pd.DataFrame,
    target: str,
    yearly: bool,
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Show box and whisker plots

    Parameters
    ----------
    name : str
        Name of dataset
    df : pd.DataFrame
        Dataframe to look at
    target : str
        Data column to look at
    yearly : bool
        Flag to indicate yearly accumulation
    external_axes : Optional[List[plt.Axes]], optional
        External axes (1 axis is expected in the list), by default None
    """
    data = df[target]
    start = df.index[0]

    # This plot has 1 axis
    if external_axes is None:
        _, ax = plt.subplots(
            figsize=plot_autoscale(),
            dpi=PLOT_DPI,
        )
    else:
        if len(external_axes) != 1:
            logger.error("Expected list of one axis item.")
            # BUG FIX: "/n" -> "\n"
            console.print("[red]Expected list of 1 axis items.\n[/red]")
            return
        (ax,) = external_axes

    color = theme.get_colors()[0]
    # Group by year or by month depending on the requested accumulation.
    if yearly:
        x_data = data.index.year
    else:
        x_data = data.index.month
    box_plot = sns.boxplot(
        x=x_data,
        y=data,
        ax=ax,
        zorder=3,
        boxprops=dict(edgecolor=color),
        flierprops=dict(
            linestyle="--",
            color=color,
            markerfacecolor=theme.up_color,
            markeredgecolor=theme.up_color,
        ),
        whiskerprops=dict(color=color),
        capprops=dict(color=color),
    )
    box_plot.set(
        xlabel=["Monthly", "Yearly"][yearly],
        ylabel=target,
        title=
        f"{['Monthly','Yearly'][yearly]} box plot of {name} {target} from {start.strftime('%Y-%m-%d')}",
    )
    l_months = [
        "Jan",
        "Feb",
        "Mar",
        "Apr",
        "May",
        "Jun",
        "Jul",
        "Aug",
        "Sep",
        "Oct",
        "Nov",
        "Dec",
    ]
    # Replace numeric month ticks (1-12) with their abbreviations.
    l_ticks = []
    if not yearly:
        for val in box_plot.get_xticklabels():
            l_ticks.append(l_months[int(val.get_text()) - 1])
        box_plot.set_xticklabels(l_ticks)

    # remove the scientific notion on the left hand side
    ax.ticklabel_format(style="plain", axis="y")
    ax.get_yaxis().set_major_formatter(
        matplotlib.ticker.FuncFormatter(
            lambda x, _: lambda_long_number_format(x)))
    theme.style_primary_axis(ax)
    if external_axes is None:
        theme.visualize_output()
def create_order_request(
    price: int,
    units: int,
    instrument: Union[str, None] = None,
    accountID: str = account,
) -> Union[pd.DataFrame, bool]:
    """Request creation of buy/sell trade order.

    Parameters
    ----------
    price : int
        The price to set for the limit order.
    units : int
        The number of units to place in the order request.
    instrument : Union[str, None]
        The loaded currency pair, by default None
    accountID : str, optional
        Oanda account ID, by default cfg.OANDA_ACCOUNT

    Returns
    -------
    Union[pd.DataFrame, bool]
        Orders data or False
    """
    if accountID == "REPLACE_ME":
        console.print("Error: Oanda account credentials are required.")
        return False
    if instrument is None:
        console.print(
            "Error: An instrument should be loaded before running this command."
        )
        return False

    # JPY, THB and HUF pairs quote to 3 decimal places; others to 5.
    precision = 3 if any(
        code in instrument for code in ("JPY", "THB", "HUF")) else 5
    price = round(price, precision)

    data = {
        "order": {
            "price": price,
            "instrument": instrument,
            "units": units,
            "type": "LIMIT",
            "timeInForce": "GTC",
            "positionFill": "DEFAULT",
        }
    }
    if client is None:
        return False
    try:
        request = orders.OrderCreate(accountID, data)
        response = client.request(request)
        created = response["orderCreateTransaction"]
        order_data = [{
            "Order ID": created["id"],
            "Instrument": created["instrument"],
            "Price": created["price"],
            "Units": created["units"],
        }]
        df_orders = pd.DataFrame.from_dict(order_data)
        return df_orders
    except V20Error as e:
        d_error = json.loads(e.msg)
        console.print(d_error["errorMessage"], "\n")
        return False
    except Exception as e:
        console.print(e)
        return False
def display_acf(
    name: str,
    df: pd.DataFrame,
    target: str,
    lags: int,
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Show Auto and Partial Auto Correlation of returns and change in returns

    Parameters
    ----------
    name : str
        Name of dataset
    df : pd.DataFrame
        Dataframe to look at
    target : str
        Data column to look at
    lags : int
        Max number of lags to look at
    external_axes : Optional[List[plt.Axes]], optional
        External axes (4 axes are expected in the list), by default None
    """
    df = df[target]
    start = df.index[0]

    fig = None
    # This plot has 4 axes
    if external_axes is None:
        fig, axes = plt.subplots(
            nrows=2,
            ncols=2,
            figsize=plot_autoscale(),
            dpi=PLOT_DPI,
        )
        (ax1, ax2), (ax3, ax4) = axes
    else:
        if len(external_axes) != 4:
            logger.error("Expected list of four axis items.")
            # BUG FIX: the message used a literal "/n"; "\n" is the newline
            console.print("[red]Expected list of 4 axis items.\n[/red]")
            return
        (ax1, ax2, ax3, ax4) = external_axes

    # BUG FIX: the top row is labelled "Returns" but previously plotted the
    # second difference — the very same data as the bottom "Change in Returns"
    # row. Returns are the first difference; change in returns is the second.
    # Auto-correlation function for the returns
    sm.graphics.tsa.plot_acf(np.diff(df.values), lags=lags, ax=ax1)
    ax1.set_title(f"{name} Returns Auto-Correlation", fontsize=9)
    # Partial auto-correlation function for the returns
    sm.graphics.tsa.plot_pacf(np.diff(df.values), lags=lags, ax=ax2)
    ax2.set_title(
        f"{name} Returns Partial Auto-Correlation",
        fontsize=9,
    )
    # Auto-correlation function for the change in returns
    sm.graphics.tsa.plot_acf(np.diff(np.diff(df.values)), lags=lags, ax=ax3)
    ax3.set_title(
        f"Change in {name} Returns Auto-Correlation",
        fontsize=9,
    )
    # Partial auto-correlation function for the change in returns
    sm.graphics.tsa.plot_pacf(np.diff(np.diff(df.values)), lags=lags, ax=ax4)
    ax4.set_title(
        f"Change in {name} Returns Partial Auto-Correlation",
        fontsize=9,
    )
    # BUG FIX: fig only exists when we created the figure ourselves; calling
    # fig.suptitle unconditionally raised NameError with external axes.
    if fig is not None:
        fig.suptitle(
            f"ACF differentials starting from {start.strftime('%Y-%m-%d')}",
            fontsize=15,
            x=0.042,
            y=0.95,
            horizontalalignment="left",
            verticalalignment="top",
        )

    theme.style_primary_axis(ax1)
    theme.style_primary_axis(ax2)
    theme.style_primary_axis(ax3)
    theme.style_primary_axis(ax4)

    if external_axes is None:
        theme.visualize_output(force_tight_layout=True)
def get_calendar_request(
        days: int, instrument: Union[str, None] = None) -> Union[pd.DataFrame, bool]:
    """Request data of significant events calendar.

    Parameters
    ----------
    days : int
        Number of days in advance
    instrument : Union[str, None]
        The loaded currency pair, by default None

    Returns
    -------
    Union[pd.DataFrame, bool]
        Calendar events data or False
    """

    def _value_with_unit(event: dict, key: str) -> str:
        """Return event[key] with the event's unit appended, or "" when absent."""
        if key not in event:
            return ""
        value = event[key]
        # "Index" readings carry no displayable unit in the Oanda payload
        if event["unit"] != "Index":
            value += event["unit"]
        return value

    if instrument is None:
        console.print(
            "Error: An instrument should be loaded before running this command."
        )
        return False

    # period is expressed in seconds; the negative sign follows the
    # Oanda forexlabs calendar convention
    parameters = {"instrument": instrument, "period": str(days * 86400 * -1)}

    if client is None:
        return False
    try:
        request = forexlabs.Calendar(params=parameters)
        response = client.request(request)

        l_data = [
            {
                "Title": event["title"],
                "Time": datetime.fromtimestamp(event["timestamp"]),
                "Impact": event.get("impact", ""),
                "Forecast": _value_with_unit(event, "forecast"),
                "Market Forecast": _value_with_unit(event, "market"),
                "Currency": event["currency"],
                "Region": event["region"],
                "Actual": _value_with_unit(event, "actual"),
                "Previous": _value_with_unit(event, "previous"),
            }
            for event in response
        ]
        return pd.DataFrame(l_data) if l_data else pd.DataFrame()
    except V20Error as e:
        d_error = json.loads(e.msg)
        console.print(d_error["message"], "\n")
        return False
def display_cusum(
    df: pd.DataFrame,
    target: str,
    threshold: float,
    drift: float,
    external_axes: Optional[List[plt.Axes]] = None,
):
    """Cumulative sum algorithm (CUSUM) to detect abrupt changes in data

    Parameters
    ----------
    df : pd.DataFrame
        Dataframe
    target : str
        Column of data to look at
    threshold : float
        Threshold value
    drift : float
        Drift parameter
    external_axes : Optional[List[plt.Axes]], optional
        External axes (2 axes are expected in the list), by default None
    """
    target_series = df[target].values

    # The code for this plot was adapted from detecta's sources because at the
    # time of writing this detect_cusum had a bug related to external axes support.
    # see https://github.com/demotu/detecta/pull/3
    tap, tan = 0, 0
    ta, tai, taf, _ = detect_cusum(
        x=target_series,
        threshold=threshold,
        drift=drift,
        ending=True,
        show=False,
    )
    # Thus some variable names are left unchanged and unreadable...
    # Re-run the detection loop to recover the cumulative sums gp/gn, which
    # detect_cusum does not return but the second subplot needs.
    gp, gn = np.zeros(target_series.size), np.zeros(target_series.size)
    for i in range(1, target_series.size):
        s = target_series[i] - target_series[i - 1]
        gp[i] = gp[i - 1] + s - drift  # cumulative sum for + change
        gn[i] = gn[i - 1] - s - drift  # cumulative sum for - change
        if gp[i] < 0:
            gp[i], tap = 0, i
        if gn[i] < 0:
            gn[i], tan = 0, i
        if gp[i] > threshold or gn[i] > threshold:  # change detected!
            ta = np.append(ta, i)  # alarm index
            tai = np.append(tai, tap if gp[i] > threshold else tan)  # start
            gp[i], gn[i] = 0, 0  # reset alarm

    if external_axes is None:
        _, axes = plt.subplots(
            2,
            1,
            sharex=True,
            figsize=plot_autoscale(),
            dpi=PLOT_DPI,
        )
        (ax1, ax2) = axes
    else:
        if len(external_axes) != 2:
            logger.error("Expected list of two axis items.")
            # BUG FIX: the message used a literal "/n"; "\n" is the newline
            console.print("[red]Expected list of 2 axis items.\n[/red]")
            return
        (ax1, ax2) = external_axes

    target_series_indexes = range(df[target].size)
    ax1.plot(target_series_indexes, target_series)
    if len(ta):
        # Mark start, end and alarm points of each detected change
        ax1.plot(
            tai,
            target_series[tai],
            ">",
            markerfacecolor=theme.up_color,
            markersize=5,
            label="Start",
        )
        ax1.plot(
            taf,
            target_series[taf],
            "<",
            markerfacecolor=theme.down_color,
            markersize=5,
            label="Ending",
        )
        ax1.plot(
            ta,
            target_series[ta],
            "o",
            markerfacecolor=theme.get_colors()[-1],
            markeredgecolor=theme.get_colors()[-2],
            markeredgewidth=1,
            markersize=3,
            label="Alarm",
        )
        ax1.legend()
    ax1.set_xlim(-0.01 * target_series.size, target_series.size * 1.01 - 1)
    ax1.set_ylabel("Amplitude")
    ymin, ymax = (
        target_series[np.isfinite(target_series)].min(),
        target_series[np.isfinite(target_series)].max(),
    )
    y_range = ymax - ymin if ymax > ymin else 1  # guard against a flat series
    ax1.set_ylim(ymin - 0.1 * y_range, ymax + 0.1 * y_range)
    ax1.set_title(
        "Time series and detected changes "
        + f"(threshold= {threshold:.3g}, drift= {drift:.3g}): N changes = {len(tai)}",
        fontsize=10,
    )
    theme.style_primary_axis(ax1)

    ax2.plot(target_series_indexes, gp, label="+")
    ax2.plot(target_series_indexes, gn, label="-")
    ax2.set_xlim(-0.01 * target_series.size, target_series.size * 1.01 - 1)
    ax2.set_xlabel("Data points")
    ax2.set_ylim(-0.01 * threshold, 1.1 * threshold)
    ax2.axhline(threshold)
    theme.style_primary_axis(ax2)
    ax2.set_title(
        "Time series of the cumulative sums of positive and negative changes",
        fontsize=10,
    )
    ax2.legend()

    if external_axes is None:
        theme.visualize_output()
def conv1d_model(
    data: Union[pd.Series, pd.DataFrame],
    n_input: int,
    n_predict: int,
    learning_rate: float,
    epochs: int,
    batch_size: int,
    test_size: float,
    n_loops: int,
    no_shuffle: bool,
) -> Tuple[pd.DataFrame, np.ndarray, np.ndarray, np.ndarray, Any]:
    """Train Conv1D model on data based on config params

    Parameters
    ----------
    data : Union[pd.Series, pd.DataFrame]
        Data to fit
    n_input : int
        Length of input sequence
    n_predict : int
        Length of output to predict
    learning_rate : float
        Learning rate for optimizer
    epochs : int
        Number of training epochs
    batch_size : int
        Model batch size
    test_size : float
        Fraction of test size
    n_loops : int
        Number of loops to train model
    no_shuffle : bool
        Flag to not shuffle data

    Returns
    -------
    pd.DataFrame
        Dataframe of predictions
    np.ndarray
        Array of validation predictions
    np.ndarray
        Array of validation data
    np.ndarray
        Array of validation x label data
    Any
        Scaler used for data
    """
    (
        X_train,
        X_valid,
        y_train,
        y_valid,
        _,
        _,
        _,
        y_dates_valid,
        forecast_data_input,
        dates_forecast_input,
        scaler,
        is_error,
    ) = prepare_scale_train_valid_test(
        data, n_input, n_predict, test_size, "", no_shuffle
    )
    if is_error:
        # Sentinel return when data preparation failed
        return pd.DataFrame(), np.array(0), np.array(0), np.array(0), None

    # BUG FIX: the message previously contained a raw line break inside the
    # f-string literal (a syntax error); rebuilt as adjacent literals.
    console.print(
        f"Training on {X_train.shape[0]} sequences of length {X_train.shape[1]}. "
        f"Using {X_valid.shape[0]} sequences of length {X_valid.shape[1]} for "
        f"validation. Model will run {n_loops} loops"
    )

    future_dates = get_next_stock_market_days(
        dates_forecast_input[-1], n_next_days=n_predict
    )

    preds = np.zeros((n_loops, X_valid.shape[0], n_predict))
    forecast_data = np.zeros((n_loops, n_predict))
    for i in range(n_loops):
        # Build Neural Network model
        model = build_neural_network_model(
            cfg_nn_models.Convolutional,
            n_input,
            n_predict,
        )
        model.compile(
            optimizer=optimizers[cfg_nn_models.Optimizer](
                learning_rate=learning_rate
            ),
            loss=cfg_nn_models.Loss,
        )
        # Conv1D expects a trailing channel dimension, hence the reshapes
        model.fit(
            X_train.reshape(X_train.shape[0], X_train.shape[1], 1),
            y_train,
            epochs=epochs,
            verbose=True,
            batch_size=batch_size,
            validation_data=(
                X_valid.reshape(X_valid.shape[0], X_valid.shape[1], 1),
                y_valid,
            ),
            callbacks=[es],
        )
        preds[i] = model.predict(
            X_valid.reshape(X_valid.shape[0], X_valid.shape[1], 1)
        ).reshape(X_valid.shape[0], n_predict)
        forecast_data[i] = forecast(
            forecast_data_input, future_dates, model, scaler
        ).values.flat

    forecast_data_df = pd.DataFrame(forecast_data.T, index=future_dates)
    return forecast_data_df, preds, y_valid, y_dates_valid, scaler
def call_barchart(self, _):
    """Process barchart command"""
    overview_url = f"https://www.barchart.com/stocks/quotes/{self.ticker}/overview"
    webbrowser.open(overview_url)
    console.print("")