def plot_strategy_group_leverage(self):
    pf = PlotFactory()

    gp = self.create_graph_properties("Leverage", "Group Leverage")

    pf.plot_line_graph(self.reduce_plot(self._strategy_group_leverage), adapter = self.DEFAULT_PLOT_ENGINE, gp = gp)
def plot_strategy_group_benchmark_pnl(self, strip = None):
    pf = PlotFactory()

    gp = self.create_graph_properties("", "Group Benchmark PnL - cumulative")

    strat_list = self._strategy_group_benchmark_pnl.columns #.sort_values()

    for line in strat_list:
        self.logger.info(line)

    # plot cumulative line of returns
    pf.plot_line_graph(self.reduce_plot(self._strategy_group_benchmark_pnl), adapter = self.DEFAULT_PLOT_ENGINE, gp = gp)

    # needs write stats flag turned on
    try:
        keys = self._strategy_group_benchmark_tsd.keys()
        ir = []

        for key in keys:
            ir.append(self._strategy_group_benchmark_tsd[key].inforatio()[0])

        if strip is not None:
            keys = [k.replace(strip, '') for k in keys]

        ret_stats = pandas.DataFrame(index = keys, data = ir, columns = ['IR'])
        # ret_stats = ret_stats.sort_index()

        gp.file_output = self.DUMP_PATH + self.FINAL_STRATEGY + ' (Group Benchmark PnL - IR) ' + str(gp.scale_factor) + '.png'
        gp.html_file_output = self.DUMP_PATH + self.FINAL_STRATEGY + ' (Group Benchmark PnL - IR) ' + str(gp.scale_factor) + '.html'
        gp.display_brand_label = False

        # plot ret stats
        pf.plot_bar_graph(ret_stats, adapter = self.DEFAULT_PLOT_ENGINE, gp = gp)
    except:
        pass
def plot_individual_leverage(self):
    pf = PlotFactory()

    gp = self.create_graph_properties("Leverage", "Individual Leverage")

    try:
        pf.plot_line_graph(self.reduce_plot(self._individual_leverage), adapter = self.DEFAULT_PLOT_ENGINE, gp = gp)
    except:
        pass
def plot_strategy_pnl(self):
    pf = PlotFactory()

    gp = self.create_graph_properties("", "Strategy PnL")

    try:
        pf.plot_line_graph(self.reduce_plot(self._strategy_pnl), adapter = self.DEFAULT_PLOT_ENGINE, gp = gp)
    except:
        pass
def plot_strategy_group_benchmark_annualised_pnl(self, cols = None):
    # TODO - unfinished, needs checking!

    if cols is None:
        cols = self._strategy_group_benchmark_annualised_pnl.columns

    pf = PlotFactory()

    gp = self.create_graph_properties("", "Group Benchmark Annualised PnL")
    gp.color = ['red', 'blue', 'purple', 'gray', 'yellow', 'green', 'pink']

    pf.plot_line_graph(self.reduce_plot(self._strategy_group_benchmark_annualised_pnl[cols]), adapter = self.DEFAULT_PLOT_ENGINE, gp = gp)
def plot_strategy_group_pnl_trades(self):
    pf = PlotFactory()

    gp = self.create_graph_properties("(bp)", "Individual Trade PnL")

    # zero when there isn't a trade exit
    # strategy_pnl_trades = self._strategy_pnl_trades * 100 * 100
    # strategy_pnl_trades = strategy_pnl_trades.dropna()

    # note only works with single large basket trade
    try:
        strategy_pnl_trades = self._strategy_pnl_trades.fillna(0) * 100 * 100
        pf.plot_line_graph(self.reduce_plot(strategy_pnl_trades), adapter = self.DEFAULT_PLOT_ENGINE, gp = gp)
    except:
        pass
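# Aside: the information-ratio figures collected in plot_strategy_group_benchmark_pnl above come
# from the library's own return-statistics objects (inforatio()). As a rough cross-check, an
# information ratio boils down to annualised return divided by annualised volatility. The helper
# below is an illustrative sketch only - its name and the 252-day annualisation factor are
# assumptions, not part of the library.
import numpy

def simple_information_ratio(daily_returns, ann_factor = 252):
    """Rough IR estimate from a pandas.Series of daily strategy returns."""
    ann_ret = daily_returns.mean() * ann_factor                 # annualised mean return
    ann_vol = daily_returns.std() * numpy.sqrt(ann_factor)      # annualised volatility

    return ann_ret / ann_vol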
df = None
df = ltsf.harvest_time_series(time_series_request)

tsc = TimeSeriesCalcs()
df = tsc.calculate_returns(df)
df = tsc.rolling_corr(df['EURUSD.close'], 20, data_frame2=df[['GBPUSD.close', 'AUDUSD.close']])

gp = GraphProperties()
gp.title = "1M FX rolling correlations"
gp.scale_factor = 3

pf = PlotFactory()
pf.plot_line_graph(df, adapter='pythalesians', gp=gp)

###### download daily data from Bloomberg for AUD/JPY, NZD/JPY spot with S&P500, then calculate correlation
if True:
    time_series_request = TimeSeriesRequest(
        start_date="01 Jan 2015",                   # start date
        finish_date=datetime.date.today(),          # finish date
        freq='daily',                               # daily data
        data_source='bloomberg',                    # use Bloomberg as data source
        tickers=['AUDJPY',                          # ticker (Thalesians)
                 'NZDJPY',
                 'S&P500'],
        fields=['close'],                           # which fields to download
        vendor_tickers=[
df = ltsf.harvest_time_series(time_series_request)
df.columns = [x.replace('.close', '') for x in df.columns.values]

df = tsc.calculate_returns(df) * 100
df = df.dropna()

df_sorted = tsc.get_bottom_valued_sorted(df, "USDBRL", n = 20)
# df = tsc.get_top_valued_sorted(df, "USDBRL", n = 20)      # get biggest up moves

# get values on day after
df2 = df.shift(-1)
df2 = df2.ix[df_sorted.index]
df2.columns = ['T+1']

df_sorted.columns = ['T']
df_sorted = df_sorted.join(df2)
df_sorted.index = [str(x.year) + '/' + str(x.month) + '/' + str(x.day) for x in df_sorted.index]

gp = GraphProperties()
gp.title = 'Largest daily falls in USDBRL'
gp.scale_factor = 3
gp.display_legend = True
gp.chart_type = 'bar'
gp.x_title = 'Dates'
gp.y_title = 'Pc'
gp.file_output = "usdbrl-biggest-downmoves.png"

pf = PlotFactory()
pf.plot_line_graph(df_sorted, adapter = 'pythalesians', gp=gp)
# plot total return series comparison for all our crosses
# in practice, we would typically make a set of xxxUSD total return indices
# and use them to compute all other crosses (assuming we are USD denominated investor)
for cross in ['AUDUSD', 'EURUSD', 'GBPUSD']:

    # create total return index using spot + deposits
    ind = IndicesFX()
    ind_df = ind.create_total_return_index(cross, tenor, spot_df, deposit_df)
    ind_df.columns = [x + '.PYT (with carry)' for x in ind_df.columns]

    # grab total return index which we downloaded from Bloomberg
    bbg_ind_df = tot_df[cross + '.close'].to_frame()
    bbg_ind_df.columns = [x + ".BBG (with carry)" for x in bbg_ind_df.columns]

    # grab spot data
    spot_plot_df = spot_df[cross + '.close'].to_frame()
    spot_plot_df = tsc.create_mult_index_from_prices(spot_plot_df)

    # combine total return indices (computed by PyThalesians), those from Bloomberg and also spot
    # with everything already rebased at 100
    ind_df = ind_df.join(bbg_ind_df)
    ind_df = ind_df.join(spot_plot_df)

    gp = GraphProperties()
    gp.title = 'Total return indices in FX & comparing with spot'
    gp.scale_factor = 3

    pf = PlotFactory()
    pf.plot_line_graph(ind_df, adapter = 'pythalesians', gp = gp)
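# Aside: create_total_return_index above rolls spot appreciation together with the carry earned or
# paid via the deposit-rate differential. As a simplified sketch (not the IndicesFX implementation),
# a daily total return index can be accrued as below, assuming annualised deposit rates quoted in
# percent and an ACT/365 day count; the function name and arguments are illustrative only.
def simple_fx_total_return_index(spot, base_depo, terms_depo, day_count = 365.0):
    """spot, base_depo, terms_depo: pandas.Series aligned on the same business-day index."""
    spot_ret = spot / spot.shift(1) - 1.0                                   # daily spot return
    carry = (base_depo.shift(1) - terms_depo.shift(1)) / 100.0 / day_count  # daily carry accrual
    total_ret = (spot_ret + carry).fillna(0.0)

    return 100.0 * (1.0 + total_ret).cumprod()                              # rebase index at 100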
time_series_request = TimeSeriesRequest(
        start_date="01 Jan 1970",                           # start date
        finish_date=datetime.date.today(),                  # finish date
        freq='daily',                                       # daily data
        data_source='quandl',                               # use Quandl as data source
        tickers=['EURUSD',                                  # ticker (Thalesians)
                 'GBPUSD'],
        fields=['close'],                                   # which fields to download
        vendor_tickers=['FRED/DEXUSEU', 'FRED/DEXUSUK'],    # ticker (Quandl)
        vendor_fields=['close'],                            # which Quandl fields to download
        cache_algo='internet_load_return')                  # how to return data

ltsf = LightTimeSeriesFactory()

daily_vals = ltsf.harvest_time_series(time_series_request)

techind = TechIndicator()
tech_params = TechParams()
tech_params.sma_period = 20

techind.create_tech_ind(daily_vals, 'SMA', tech_params=tech_params)

sma = techind.get_techind()
signal = techind.get_signal()

combine = daily_vals.join(sma, how='outer')

pf = PlotFactory()
pf.plot_line_graph(combine, adapter='pythalesians')
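# Aside: as a quick sanity check, the same 20-period simple moving average can be reproduced with
# plain pandas (a sketch only, assuming a recent pandas version; TechIndicator also generates the
# trading signal for us above):
sma_check = daily_vals.rolling(window = 20).mean()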
from pythalesians.economics.events.eventstudy import EventStudy

es = EventStudy()

# work out cumulative asset price moves over the event
df_event = es.get_intraday_moves_over_custom_event(df, df_event_times)

# create an average move
df_event['Avg'] = df_event.mean(axis=1)

# plotting spot over economic data event
gp = GraphProperties()
gp.scale_factor = 3
gp.title = 'USDJPY spot moves over recent NFP'

# plot in shades of blue (so earlier releases are lighter, later releases are darker)
gp.color = 'Blues'
gp.color_2 = []
gp.y_axis_2_series = []
gp.display_legend = False

# last release will be in red, average move in orange
gp.color_2_series = [df_event.columns[-2], df_event.columns[-1]]
gp.color_2 = ['red', 'orange']
gp.linewidth_2 = 2
gp.linewidth_2_series = gp.color_2_series

pf = PlotFactory()
pf.plot_line_graph(df_event * 100, adapter='pythalesians', gp=gp)
        finish_date = datetime.date.today(),                            # finish date
        freq = 'daily',                                                 # daily data
        data_source = 'quandl',                                         # use quandl as data source
        tickers = ['S&P500'],                                           # ticker (Thalesians)
        fields = ['close', 'open', 'adjusted-close', 'high'],           # which fields to download
        vendor_tickers = ['YAHOO/INDEX_GSPC'],                          # ticker (quandl)
        vendor_fields = ['close', 'open', 'adjusted-close', 'high'],    # which quandl fields to download
        cache_algo = 'internet_load_return')                            # how to return data

ltsf = LightTimeSeriesFactory()

df = None
df = ltsf.harvest_time_series(time_series_request)

pf = PlotFactory()
pf.plot_line_graph(df, adapter = 'pythalesians')

###### download monthly quandl data for Total US nonfarm payrolls
if True:
    time_series_request = TimeSeriesRequest(
        start_date="01 Jan 1940",                       # start date
        finish_date=datetime.date.today(),              # finish date
        freq='daily',                                   # daily data
        data_source='quandl',                           # use quandl as data source
        tickers=['US Total Nonfarm Payrolls'],          # ticker (Thalesians)
        fields=['close'],                               # which fields to download
        vendor_tickers=['FRED/PAYEMS'],                 # ticker (quandl)
        vendor_fields=['close'],                        # which quandl fields to download
        cache_algo='internet_load_return')              # how to return data

    ltsf = LightTimeSeriesFactory()
ltsf = LightTimeSeriesFactory()

df = ltsf.harvest_time_series(time_series_request)
df_ret = tsc.calculate_returns(df)

day_of_month_seasonality = seasonality.bus_day_of_month_seasonality(df_ret, partition_by_month = False)
day_of_month_seasonality = tsc.convert_month_day_to_date_time(day_of_month_seasonality)

gp = GraphProperties()
gp.date_formatter = '%b'
gp.title = 'S&P500 seasonality'
gp.scale_factor = 3
gp.file_output = "output_data/S&P500 DOM seasonality.png"

pf.plot_line_graph(day_of_month_seasonality, adapter='pythalesians', gp = gp)

###### calculate seasonal moves in EUR/USD (using Quandl data)
if True:
    time_series_request = TimeSeriesRequest(
        start_date = "01 Jan 1970",                             # start date
        finish_date = datetime.date.today(),                    # finish date
        freq = 'daily',                                         # daily data
        data_source = 'quandl',                                 # use Quandl as data source
        tickers = ['EURUSD',                                    # ticker (Thalesians)
                   'GBPUSD'],
        fields = ['close'],                                     # which fields to download
        vendor_tickers = ['FRED/DEXUSEU', 'FRED/DEXUSUK'],      # ticker (Quandl)
        vendor_fields = ['close'],                              # which Quandl fields to download
        cache_algo = 'internet_load_return')                    # how to return data
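# Aside: conceptually, bus_day_of_month_seasonality above buckets each return by its business day
# within the month and averages across history (the library call also handles month partitioning
# and calendar details). A rough sketch, with an illustrative helper name only:
def simple_bus_day_of_month_seasonality(returns):
    """returns: pandas.DataFrame of daily returns on a business-day DatetimeIndex."""
    bus_day = returns.groupby([returns.index.year, returns.index.month]).cumcount() + 1

    return returns.groupby(bus_day).mean()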
start_date="01 Jan 1970", # start date finish_date=datetime.date.today(), # finish date freq='daily', # daily data data_source='quandl', # use Quandl as data source tickers=[ 'EURUSD', # ticker (Thalesians) 'GBPUSD' ], fields=['close'], # which fields to download vendor_tickers=['FRED/DEXUSEU', 'FRED/DEXUSUK'], # ticker (Quandl) vendor_fields=['close'], # which Bloomberg fields to download cache_algo='internet_load_return') # how to return data ltsf = LightTimeSeriesFactory() daily_vals = ltsf.harvest_time_series(time_series_request) techind = TechIndicator() tech_params = TechParams() tech_params.sma_period = 20 techind.create_tech_ind(daily_vals, 'SMA', tech_params=tech_params) sma = techind.get_techind() signal = techind.get_signal() combine = daily_vals.join(sma, how='outer') pf = PlotFactory() pf.plot_line_graph(combine, adapter='pythalesians')
        vendor_fields=['PX_LAST'],                      # which Bloomberg fields to download
        cache_algo='internet_load_return')              # how to return data

daily_vals = ltsf.harvest_time_series(time_series_request)

pf = PlotFactory()

gp = GraphProperties()
gp.title = 'Spot values'
gp.file_output = 'output_data/demo.png'
gp.html_file_output = 'output_data/demo.htm'
gp.source = 'Thalesians/BBG'

# plot using PyThalesians
pf.plot_line_graph(daily_vals, adapter='pythalesians', gp=gp)

# plot using Bokeh (still needs a lot of work!)
pf.plot_line_graph(daily_vals, adapter='bokeh', gp=gp)

# do more complicated charts using several different Matplotlib stylesheets (which have been customised)
if False:
    ltsf = LightTimeSeriesFactory()

    # load market data
    start = '01 Jan 1970'
    end = datetime.datetime.utcnow()
    tickers = ['AUDJPY', 'USDJPY']
    vendor_tickers = ['AUDJPY BGN Curncy', 'USDJPY BGN Curncy']

    time_series_request = TimeSeriesRequest(