# Shared imports assumed by the functions below. misc, backtest, dbaccess and
# mysqlaccess are project modules referenced directly in the code; dh and strat
# are project aliases (data-handling and strategy helpers) whose import paths
# are not shown in the original source.
import os
import json
import datetime
import numpy as np
import pandas as pd
import misc
import backtest
import dbaccess
import mysqlaccess


def turtle(asset, start_date, end_date, systems, config):
    rollrule = config['rollrule']
    nearby = config['nearby']
    file_prefix = config['file_prefix'] + '_' + asset + '_'
    # shift the daily history back by the longest system window (in business days)
    start_d = misc.day_shift(start_date, '-' + str(max([max(sys) for sys in systems])) + 'b')
    ddf = misc.nearby(asset, nearby, start_d, end_date, rollrule, 'd', need_shift=True)
    mdf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm', need_shift=True)
    # ddf = dh.conv_ohlc_freq(mdf, 'D')
    output = {}
    for ix, sys in enumerate(systems):
        config['signals'] = sys[:3]
        config['max_loss'] = sys[3]
        config['max_pos'] = sys[4]
        (res, closed_trades, ts) = turtle_sim(ddf, mdf, config)
        output[ix] = res
        print 'saving results for scen = %s' % str(ix)
        all_trades = {}
        for i, tradepos in enumerate(closed_trades):
            all_trades[i] = strat.tradepos2dict(tradepos)
        fname = file_prefix + str(ix) + '_trades.csv'
        trades = pd.DataFrame.from_dict(all_trades).T
        trades.to_csv(fname)
        fname = file_prefix + str(ix) + '_dailydata.csv'
        ts.to_csv(fname)
    fname = file_prefix + 'stats.csv'
    res = pd.DataFrame.from_dict(output)
    res.to_csv(fname)
    return
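# A minimal sketch of how turtle() above might be driven. Every value here
# (asset code, dates, window lengths, config keys beyond those read in turtle)
# is an illustrative assumption, not taken from the original code. Each entry
# in `systems` is a 5-element sequence: the first three items become
# config['signals'], the fourth config['max_loss'], the fifth config['max_pos'].
def _example_turtle_run():
    example_config = {'nearby': 1, 'rollrule': '-10b', 'file_prefix': 'turtle_test'}
    example_systems = [[20, 10, 55, 2, 4], [40, 20, 110, 2, 4]]
    turtle('rb', datetime.date(2014, 1, 1), datetime.date(2014, 12, 31),
           example_systems, example_config)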
def dual_thrust(asset, start_date, end_date, scenarios, config):
    nearby = config['nearby']
    rollrule = config['rollrule']
    start_d = misc.day_shift(start_date, '-2b')
    file_prefix = config['file_prefix'] + '_' + asset + '_'
    ddf = misc.nearby(asset, nearby, start_d, end_date, rollrule, 'd', need_shift=True)
    mdf = misc.nearby(asset, nearby, start_d, end_date, rollrule, 'm', need_shift=True)
    # ddf = dh.conv_ohlc_freq(mdf, 'D')
    output = {}
    for ix, s in enumerate(scenarios):
        config['win'] = s[1]
        config['k'] = s[0]
        config['m'] = s[2]
        (res, closed_trades, ts) = dual_thrust_sim(ddf, mdf, config)
        output[ix] = res
        print 'saving results for scen = %s' % str(ix)
        all_trades = {}
        for i, tradepos in enumerate(closed_trades):
            all_trades[i] = strat.tradepos2dict(tradepos)
        fname = file_prefix + str(ix) + '_trades.csv'
        trades = pd.DataFrame.from_dict(all_trades).T
        trades.to_csv(fname)
        fname = file_prefix + str(ix) + '_dailydata.csv'
        ts.to_csv(fname)
    fname = file_prefix + 'stats.csv'
    res = pd.DataFrame.from_dict(output)
    res.to_csv(fname)
    return
def dual_thrust(asset, start_date, end_date, scenarios, config):
    nearby = config['nearby']
    rollrule = config['rollrule']
    start_d = misc.day_shift(start_date, '-4b')
    file_prefix = config['file_prefix'] + '_' + asset + '_'
    ddf = misc.nearby(asset, nearby, start_d, end_date, rollrule, 'd', need_shift=True)
    mdf = misc.nearby(asset, nearby, start_d, end_date, rollrule, 'm', need_shift=True)
    mdf = backtest.cleanup_mindata(mdf, asset)
    # ddf = dh.conv_ohlc_freq(mdf, 'D')
    output = {}
    for ix, s in enumerate(scenarios):
        config['win'] = s[1]
        config['k'] = s[0]
        config['m'] = s[2]
        (res, closed_trades, ts) = dual_thrust_sim(ddf, mdf, config)
        output[ix] = res
        print 'saving results for scen = %s' % str(ix)
        all_trades = {}
        for i, tradepos in enumerate(closed_trades):
            all_trades[i] = strat.tradepos2dict(tradepos)
        fname = file_prefix + str(ix) + '_trades.csv'
        trades = pd.DataFrame.from_dict(all_trades).T
        trades.to_csv(fname)
        fname = file_prefix + str(ix) + '_dailydata.csv'
        ts.to_csv(fname)
    fname = file_prefix + 'stats.csv'
    res = pd.DataFrame.from_dict(output)
    res.to_csv(fname)
    return
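# Sketch of the scenario list consumed by dual_thrust() above: each entry is a
# (k, win, m) triple, read as s[0] -> config['k'], s[1] -> config['win'] and
# s[2] -> config['m']. The numeric values below are illustrative assumptions.
_example_dt_scenarios = [(0.4, 1, 0.5), (0.5, 2, 0.5), (0.6, 2, 1.0)]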
def r_breaker(asset, start_date, end_date, scenarios, freqs, config):
    nearby = config['nearby']
    rollrule = config['rollrule']
    start_d = misc.day_shift(start_date, '-1b')
    file_prefix = config['file_prefix'] + '_' + asset + '_'
    ddf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'd', need_shift=True)
    mdf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm', need_shift=True)
    mdf = backtest.cleanup_mindata(mdf, asset)
    # ddf = dh.conv_ohlc_freq(mdf, 'D')
    output = {}
    for ix, freq in enumerate(freqs):
        if freq != '1min':
            df = dh.conv_ohlc_freq(mdf, freq)
        else:
            df = mdf
        for iy, k in enumerate(scenarios):
            idx = ix * 10 + iy
            config['k'] = k
            (res, closed_trades, ts) = r_breaker_sim(ddf, df, config)
            output[idx] = res
            print 'saving results for scen = %s' % str(idx)
            all_trades = {}
            for i, tradepos in enumerate(closed_trades):
                all_trades[i] = strat.tradepos2dict(tradepos)
            fname = file_prefix + str(idx) + '_trades.csv'
            trades = pd.DataFrame.from_dict(all_trades).T
            trades.to_csv(fname)
            fname = file_prefix + str(idx) + '_dailydata.csv'
            ts.to_csv(fname)
    fname = file_prefix + 'stats.csv'
    res = pd.DataFrame.from_dict(output)
    res.to_csv(fname)
    return
def chanbreak(asset, start_date, end_date, freqs, windows, config):
    nearby = config['nearby']
    rollrule = config['rollrule']
    file_prefix = config['file_prefix'] + '_' + asset + '_'
    # NOTE: the original code referenced start_d without defining it; a
    # one-business-day shift (as used by the other drivers here) is assumed.
    start_d = misc.day_shift(start_date, '-1b')
    ddf = misc.nearby(asset, nearby, start_d, end_date, rollrule, 'd', need_shift=True)
    mdf = misc.nearby(asset, nearby, start_d, end_date, rollrule, 'm', need_shift=True)
    mdf = backtest.cleanup_mindata(mdf, asset)
    output = {}
    for ix, freq in enumerate(freqs):
        config['freq'] = freq
        for iy, win in enumerate(windows):
            idx = ix * 10 + iy
            config['win'] = win
            (res, closed_trades, ts) = chanbreak_sim(mdf, ddf, config)
            output[idx] = res
            print 'saving results for scen = %s' % str(idx)
            all_trades = {}
            for i, tradepos in enumerate(closed_trades):
                all_trades[i] = strat.tradepos2dict(tradepos)
            fname = file_prefix + str(idx) + '_trades.csv'
            trades = pd.DataFrame.from_dict(all_trades).T
            trades.to_csv(fname)
            fname = file_prefix + str(idx) + '_dailydata.csv'
            ts.to_csv(fname)
    fname = file_prefix + 'stats.csv'
    res = pd.DataFrame.from_dict(output)
    res.to_csv(fname)
    return
def load_data(self, idx):
    asset = self.sim_assets[idx]
    for prod in asset:
        mdf = misc.nearby(prod, self.config['nearby'], self.config['start_date'],
                          self.config['end_date'], self.config['rollrule'], 'm',
                          need_shift=self.need_shift,
                          database=self.config.get('database', 'hist_data'))
        mdf = misc.cleanup_mindata(mdf, prod)
        self.min_data[prod] = mdf
def fisher_swing(asset, start_date, end_date, freqs, windows, config):
    nearby = config['nearby']
    rollrule = config['rollrule']
    file_prefix = config['file_prefix'] + '_' + asset + '_'
    df = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm', need_shift=True)
    df = backtest.cleanup_mindata(df, asset)
    output = {}
    for ix, freq in enumerate(freqs):
        xdf = dh.conv_ohlc_freq(df, freq)
        for iy, win in enumerate(windows):
            idx = ix * 10 + iy
            config['win'] = win
            config['freq'] = freq
            (res, closed_trades, ts) = fisher_swing_sim(df, xdf, config)
            output[idx] = res
            print 'saving results for scen = %s' % str(idx)
            all_trades = {}
            for i, tradepos in enumerate(closed_trades):
                all_trades[i] = strat.tradepos2dict(tradepos)
            fname = file_prefix + str(idx) + '_trades.csv'
            trades = pd.DataFrame.from_dict(all_trades).T
            trades.to_csv(fname)
            fname = file_prefix + str(idx) + '_dailydata.csv'
            ts.to_csv(fname)
    fname = file_prefix + 'stats.csv'
    res = pd.DataFrame.from_dict(output)
    res.to_csv(fname)
    return
def get_cont_data(asset, start_date, end_date, freq='1m', nearby=1, rollrule='-10b'):
    if nearby == 0:
        mdf = mysqlaccess.load_min_data_to_df('fut_min', asset, start_date, end_date,
                                              minid_start=300, minid_end=2114,
                                              database='hist_data')
        mdf['contract'] = asset
    else:
        mdf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm',
                          need_shift=True, database='hist_data')
        mdf = backtest.cleanup_mindata(mdf, asset)
    xdf = dh.conv_ohlc_freq(mdf, freq, extra_cols=['contract'], bar_func=dh.bar_conv_func2)
    return xdf
def load_data(self, idx):
    asset = self.sim_assets[idx]
    for prod in asset:
        if prod in self.data_store:
            continue
        ticker = prod
        if '$' not in ticker:
            ticker_sp = [ticker, 'spot']
        else:
            ticker_sp = ticker.split('$')
            ticker = ticker_sp[0]
        postfix = '_daily'
        if self.sim_freq == 'm':
            postfix = '_min'
        dbtable = ticker_sp[-1] + postfix
        if ticker_sp[-1] in ['spot']:
            field_id = 'spotID'
        elif ticker_sp[-1] in ['ccy']:
            field_id = 'instID'
        if len(ticker_sp) > 2:
            nb = int(ticker_sp[1])
            if len(ticker_sp) > 3:
                rollrule = ticker_sp[2]
            else:
                rollrule = '-1b'
            df = misc.nearby(ticker, nb, self.config['start_date'], self.config['end_date'],
                             rollrule, self.sim_freq, need_shift=self.need_shift,
                             database=self.config.get('dbconfig', dbaccess.dbconfig)['database'])
        else:
            cnx = dbaccess.connect(**self.config.get('dbconfig', dbaccess.dbconfig))
            if self.sim_freq == 'd':
                df = dbaccess.load_daily_data_to_df(cnx, dbtable, ticker,
                                                    self.config['start_date'],
                                                    self.config['end_date'],
                                                    index_col='date', field=field_id)
            else:
                minid_start = 1500
                minid_end = 2114
                if ticker in misc.night_session_markets:
                    minid_start = 300
                df = dbaccess.load_min_data_to_df(cnx, dbtable, ticker,
                                                  self.config['start_date'],
                                                  self.config['end_date'],
                                                  minid_start, minid_end)
                df['contract'] = ticker
        if self.sim_freq == 'm':
            df = misc.cleanup_mindata(df, ticker)
        df.columns = [(prod, col) for col in df.columns]
        self.data_store[prod] = df
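# Illustration of the '$'-separated ticker convention parsed by load_data()
# above; the concrete tickers are assumptions for demonstration only:
#   'USD_SGD$ccy'    -> plain lookup in the ccy_daily / ccy_min table
#   'cu$1$-20b$fut'  -> product 'cu', 1st nearby, roll rule '-20b', built with
#                       misc.nearby() from the 'fut' daily/minute table
#   'some_spot_id'   -> no '$', treated as ['some_spot_id', 'spot']
def _split_ticker(ticker):
    return ticker.split('$') if '$' in ticker else [ticker, 'spot']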
def get_data(spotID, start, end, spot_table='spot_daily', name=None, index_col='date',
             fx_pair=None, field='spotID', args=None):
    cnx = dbaccess.connect(**dbaccess.dbconfig)
    if args:
        args['start_date'] = start
        args['end_date'] = end
        df = misc.nearby(spotID, **args)
    else:
        df = dbaccess.load_daily_data_to_df(cnx, spot_table, spotID, start, end,
                                            index_col=None, field=field)
    if isinstance(df[index_col][0], basestring):
        if len(df[index_col][0]) > 12:
            df[index_col] = df[index_col].apply(
                lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").date())
        else:
            df[index_col] = df[index_col].apply(
                lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date())
    df = df.set_index(index_col)
    if name:
        col_name = name
    else:
        col_name = spotID
    if field == 'ccy':
        df = df[df.tenor == '0W']
        data_field = 'rate'
    elif field == 'spotID':
        data_field = 'close'
    df = df[[data_field]]
    df.rename(columns={data_field: col_name}, inplace=True)
    if fx_pair:
        fx = dbaccess.load_daily_data_to_df(cnx, 'fx_daily', fx_pair, start, end,
                                            index_col=None, field='ccy')
        fx = fx[fx['tenor'] == '0W']
        if isinstance(fx[index_col][0], basestring):
            fx[index_col] = fx[index_col].apply(
                lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").date())
        # the original repeated this set_index call, which would fail on the
        # second invocation; a single call is kept here
        fx = fx.set_index(index_col)
        df[col_name] = df[col_name] / fx['rate']
    return df
def load_data(self, idx):
    asset = self.sim_assets[idx]
    for prod in asset:
        df = misc.nearby(prod, self.config['nearby'], self.config['start_date'],
                         self.config['end_date'], self.config['rollrule'],
                         self.sim_freq, need_shift=self.need_shift)
        if self.sim_freq == 'm':
            df = misc.cleanup_mindata(df, prod)
        self.data_store[prod] = df
def get_cont_data(asset, start_date, end_date, freq='1m', nearby=1, rollrule='-10b'):
    cnx = dbaccess.connect(**dbaccess.hist_dbconfig)
    if nearby == 0:
        mdf = dbaccess.load_min_data_to_df(cnx, 'fut_min', asset, start_date, end_date,
                                           minid_start=300, minid_end=2114,
                                           database='hist_data')
        mdf['contract'] = asset
    else:
        mdf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm',
                          need_shift=True, database='hist_data')
        mdf = misc.cleanup_mindata(mdf, asset)
    xdf = dh.conv_ohlc_freq(mdf, freq, extra_cols=['contract'], bar_func=dh.bar_conv_func2)
    return xdf
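# A minimal usage sketch for get_cont_data() above. The asset code, dates,
# bar frequency and roll rule are illustrative assumptions, not values from
# the original code.
def _example_cont_data():
    xdf = get_cont_data('rb', datetime.date(2015, 1, 1), datetime.date(2015, 12, 31),
                        freq='5m', nearby=1, rollrule='-10b')
    return xdf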
def get_data(spotID, start, end, spot_table='spot_daily', name=None, index_col='date',
             fx_pair=None, field='spotID', args=None):
    cnx = dbaccess.connect(**dbaccess.dbconfig)
    if args:
        args['start_date'] = start
        args['end_date'] = end
        df = misc.nearby(spotID, **args)
        df = df.reset_index()
    else:
        df = dbaccess.load_daily_data_to_df(cnx, spot_table, spotID, start, end,
                                            index_col=None, field=field)
    if isinstance(df[index_col][0], basestring):
        if len(df[index_col][0]) > 12:
            df[index_col] = df[index_col].apply(
                lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").date())
        else:
            df[index_col] = df[index_col].apply(
                lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date())
    df = df.set_index(index_col)
    if name:
        col_name = name
    else:
        col_name = spotID
    if field == 'ccy':
        df = df[df.tenor == '0W']
        data_field = 'rate'
    elif field == 'spotID':
        data_field = 'close'
    elif field == 'instID':
        data_field = 'close'
    df = df[[data_field]]
    df.rename(columns={data_field: col_name}, inplace=True)
    if fx_pair:
        fx = fx_pair.split('/')
        direction = misc.get_mkt_fxpair(fx[0], fx[1])
        if direction < 0:
            mkt_pair = '/'.join([fx[1], fx[0]])
        else:
            mkt_pair = fx_pair
        fx = dbaccess.load_daily_data_to_df(cnx, 'fx_daily', mkt_pair, start, end,
                                            index_col=None, field='ccy')
        fx = fx[fx['tenor'] == '0W']
        if isinstance(fx[index_col][0], basestring):
            fx[index_col] = fx[index_col].apply(
                lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").date())
        fx = fx.set_index(index_col)
        if direction >= 0:
            df[col_name] = df[col_name] / fx['rate']
        else:
            df[col_name] = df[col_name] * fx['rate']
    return df
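# A minimal usage sketch for get_data() above. The spot ID, FX pair, column
# name and dates are illustrative assumptions, not values from the original
# code; the FX pair is only applied when fx_pair is given.
def _example_get_data():
    ts = get_data('some_spot_id', datetime.date(2016, 1, 1), datetime.date(2016, 12, 31),
                  name='spot_px', fx_pair='USD/CNY')
    return ts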
def simlauncher_min(config_file):
    sim_config = {}
    with open(config_file, 'r') as fp:
        sim_config = json.load(fp)
    bktest_split = sim_config['sim_func'].split('.')
    bktest_module = __import__(bktest_split[0])
    run_sim = getattr(bktest_module, bktest_split[1])
    dir_name = config_file.split('.')[0]
    test_folder = get_bktest_folder()
    file_prefix = test_folder + dir_name + os.path.sep
    if not os.path.exists(file_prefix):
        os.makedirs(file_prefix)
    sim_list = sim_config['products']
    config = {}
    start_date = datetime.datetime.strptime(sim_config['start_date'], '%Y%m%d').date()
    config['start_date'] = start_date
    end_date = datetime.datetime.strptime(sim_config['end_date'], '%Y%m%d').date()
    config['end_date'] = end_date
    scen_dim = [len(sim_config[s]) for s in sim_config['scen_keys']]
    outcol_list = ['asset', 'scenario'] + sim_config['scen_keys'] \
        + ['sharp_ratio', 'tot_pnl', 'std_pnl', 'num_days',
           'max_drawdown', 'max_dd_period', 'profit_dd_ratio',
           'all_profit', 'tot_cost', 'win_ratio', 'num_win', 'num_loss',
           'profit_per_win', 'profit_per_loss']
    scenarios = [list(s) for s in np.ndindex(tuple(scen_dim))]
    config.update(sim_config['config'])
    config['pos_class'] = eval(sim_config['pos_class'])
    if 'proc_func' in sim_config:
        config['proc_func'] = eval(sim_config['proc_func'])
    file_prefix = file_prefix + sim_config['sim_name']
    if config['close_daily']:
        file_prefix = file_prefix + 'daily_'
    config['file_prefix'] = file_prefix
    summary_df = pd.DataFrame()
    fname = config['file_prefix'] + 'summary.csv'
    if os.path.isfile(fname):
        summary_df = pd.DataFrame.from_csv(fname)
    for asset in sim_list:
        file_prefix = config['file_prefix'] + '_' + asset + '_'
        fname = file_prefix + 'stats.json'
        output = {}
        if os.path.isfile(fname):
            with open(fname, 'r') as fp:
                output = json.load(fp)
        if len(output.keys()) < len(scenarios):
            if asset in sim_start_dict:
                start_date = max(sim_start_dict[asset], config['start_date'])
            else:
                start_date = config['start_date']
            if 'offset' in sim_config:
                config['offset'] = sim_config['offset'] * trade_offset_dict[asset]
            else:
                config['offset'] = trade_offset_dict[asset]
            config['marginrate'] = (sim_margin_dict[asset], sim_margin_dict[asset])
            config['nearby'] = 1
            config['rollrule'] = '-50b'
            config['exit_min'] = 2112
            config['no_trade_set'] = range(300, 301) + range(1500, 1501) + range(2059, 2100)
            if asset in ['cu', 'al', 'zn']:
                config['nearby'] = 3
                config['rollrule'] = '-1b'
            elif asset in ['IF', 'IH', 'IC']:
                config['rollrule'] = '-2b'
                config['no_trade_set'] = range(1515, 1520) + range(2110, 2115)
            elif asset in ['au', 'ag']:
                config['rollrule'] = '-25b'
            elif asset in ['TF', 'T']:
                config['rollrule'] = '-20b'
                config['no_trade_set'] = range(1515, 1520) + range(2110, 2115)
            # note: this clears the per-asset no_trade_set values assigned above
            config['no_trade_set'] = []
            nearby = config['nearby']
            rollrule = config['rollrule']
            if nearby > 0:
                mdf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm',
                                  need_shift=True, database='hist_data')
                mdf = cleanup_mindata(mdf, asset)
            if 'need_daily' in sim_config:
                ddf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'd',
                                  need_shift=True, database='hist_data')
                config['ddf'] = ddf
            for ix, s in enumerate(scenarios):
                fname1 = file_prefix + str(ix) + '_trades.csv'
                fname2 = file_prefix + str(ix) + '_dailydata.csv'
                if os.path.isfile(fname1) and os.path.isfile(fname2):
                    continue
                for key, seq in zip(sim_config['scen_keys'], s):
                    config[key] = sim_config[key][seq]
                df = mdf.copy(deep=True)
                (res, closed_trades, ts) = run_sim(df, config)
                res.update(dict(zip(sim_config['scen_keys'], s)))
                res['asset'] = asset
                output[ix] = res
                print 'saving results for asset = %s, scen = %s' % (asset, str(ix))
                all_trades = {}
                for i, tradepos in enumerate(closed_trades):
                    all_trades[i] = strat.tradepos2dict(tradepos)
                trades = pd.DataFrame.from_dict(all_trades).T
                trades.to_csv(fname1)
                ts.to_csv(fname2)
                fname = file_prefix + 'stats.json'
                try:
                    with open(fname, 'w') as ofile:
                        json.dump(output, ofile)
                except:
                    continue
        res = pd.DataFrame.from_dict(output, orient='index')
        res.index.name = 'scenario'
        res = res.sort(columns=['sharp_ratio'], ascending=False)
        res = res.reset_index()
        res.set_index(['asset', 'scenario'])
        out_res = res[outcol_list]
        if len(summary_df) == 0:
            summary_df = out_res[:10].copy(deep=True)
        else:
            summary_df = summary_df.append(out_res[:10])
        fname = config['file_prefix'] + 'summary.csv'
        summary_df.to_csv(fname)
    return
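# Sketch of the JSON config layout that simlauncher_min() above expects. The
# key names are the ones the function actually reads; every value shown is an
# illustrative assumption (module paths, products, dates and parameter grids
# are not taken from the original source).
# {
#     "sim_name": "DTtest_",
#     "sim_func": "bktest_dt.dual_thrust_sim",
#     "products": ["rb", "i"],
#     "start_date": "20150101",
#     "end_date": "20161231",
#     "scen_keys": ["k", "win"],
#     "k": [0.4, 0.5, 0.6],
#     "win": [1, 2],
#     "pos_class": "strat.TradePos",
#     "config": {"close_daily": false}
# }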
def simlauncher_min(config_file):
    sim_config = {}
    with open(config_file, 'r') as fp:
        sim_config = json.load(fp)
    bktest_split = sim_config['sim_func'].split('.')
    bktest_module = __import__(bktest_split[0])
    run_sim = getattr(bktest_module, bktest_split[1])
    dir_name = config_file.split('.')[0]
    test_folder = get_bktest_folder()
    file_prefix = test_folder + dir_name + os.path.sep
    if not os.path.exists(file_prefix):
        os.makedirs(file_prefix)
    sim_list = sim_config['products']
    config = {}
    start_date = datetime.datetime.strptime(sim_config['start_date'], '%Y%m%d').date()
    config['start_date'] = start_date
    end_date = datetime.datetime.strptime(sim_config['end_date'], '%Y%m%d').date()
    config['end_date'] = end_date
    scen_dim = [len(sim_config[s]) for s in sim_config['scen_keys']]
    outcol_list = ['asset', 'scenario'] + sim_config['scen_keys'] \
        + ['sharp_ratio', 'tot_pnl', 'std_pnl', 'num_days',
           'max_drawdown', 'max_dd_period', 'profit_dd_ratio',
           'all_profit', 'tot_cost', 'win_ratio', 'num_win', 'num_loss',
           'profit_per_win', 'profit_per_loss']
    scenarios = [list(s) for s in np.ndindex(tuple(scen_dim))]
    config.update(sim_config['config'])
    config['pos_class'] = eval(sim_config['pos_class'])
    if 'proc_func' in sim_config:
        config['proc_func'] = eval(sim_config['proc_func'])
    file_prefix = file_prefix + sim_config['sim_name']
    if config['close_daily']:
        file_prefix = file_prefix + 'daily_'
    config['file_prefix'] = file_prefix
    summary_df = pd.DataFrame()
    fname = config['file_prefix'] + 'summary.csv'
    if os.path.isfile(fname):
        summary_df = pd.DataFrame.from_csv(fname)
    for asset in sim_list:
        file_prefix = config['file_prefix'] + '_' + asset + '_'
        fname = file_prefix + 'stats.json'
        output = {}
        if os.path.isfile(fname):
            with open(fname, 'r') as fp:
                output = json.load(fp)
        if len(output.keys()) < len(scenarios):
            if asset in sim_start_dict:
                start_date = max(sim_start_dict[asset], config['start_date'])
            else:
                start_date = config['start_date']
            if 'offset' in sim_config:
                config['offset'] = sim_config['offset'] * trade_offset_dict[asset]
            else:
                config['offset'] = trade_offset_dict[asset]
            config['marginrate'] = (sim_margin_dict[asset], sim_margin_dict[asset])
            config['nearby'] = 1
            config['rollrule'] = '-50b'
            config['exit_min'] = 2112
            config['no_trade_set'] = range(300, 301) + range(1500, 1501) + range(2059, 2100)
            if asset in ['cu', 'al', 'zn']:
                config['nearby'] = 3
                config['rollrule'] = '-1b'
            elif asset in ['IF', 'IH', 'IC']:
                config['rollrule'] = '-2b'
                config['no_trade_set'] = range(1515, 1520) + range(2110, 2115)
            elif asset in ['au', 'ag']:
                config['rollrule'] = '-25b'
            elif asset in ['TF', 'T']:
                config['rollrule'] = '-20b'
                config['no_trade_set'] = range(1515, 1520) + range(2110, 2115)
            config['no_trade_set'] = []
            nearby = config['nearby']
            rollrule = config['rollrule']
            if nearby > 0:
                mdf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm',
                                  need_shift=True)
                mdf = cleanup_mindata(mdf, asset)
            if 'need_daily' in sim_config:
                ddf = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'd',
                                  need_shift=True)
                config['ddf'] = ddf
            for ix, s in enumerate(scenarios):
                fname1 = file_prefix + str(ix) + '_trades.csv'
                fname2 = file_prefix + str(ix) + '_dailydata.csv'
                if os.path.isfile(fname1) and os.path.isfile(fname2):
                    continue
                for key, seq in zip(sim_config['scen_keys'], s):
                    config[key] = sim_config[key][seq]
                df = mdf.copy(deep=True)
                (res, closed_trades, ts) = run_sim(df, config)
                res.update(dict(zip(sim_config['scen_keys'], s)))
                res['asset'] = asset
                output[ix] = res
                print 'saving results for asset = %s, scen = %s' % (asset, str(ix))
                all_trades = {}
                for i, tradepos in enumerate(closed_trades):
                    all_trades[i] = strat.tradepos2dict(tradepos)
                trades = pd.DataFrame.from_dict(all_trades).T
                trades.to_csv(fname1)
                ts.to_csv(fname2)
                fname = file_prefix + 'stats.json'
                try:
                    with open(fname, 'w') as ofile:
                        json.dump(output, ofile)
                except:
                    continue
        res = pd.DataFrame.from_dict(output, orient='index')
        res.index.name = 'scenario'
        res = res.sort(columns=['sharp_ratio'], ascending=False)
        res = res.reset_index()
        res.set_index(['asset', 'scenario'])
        out_res = res[outcol_list]
        if len(summary_df) == 0:
            summary_df = out_res[:10].copy(deep=True)
        else:
            summary_df = summary_df.append(out_res[:10])
        fname = config['file_prefix'] + 'summary.csv'
        summary_df.to_csv(fname)
    return
def aberration(asset, start_date, end_date, freqs, windows, config):
    nearby = config['nearby']
    rollrule = config['rollrule']
    file_prefix = config['file_prefix'] + '_' + asset + '_'
    df = misc.nearby(asset, nearby, start_date, end_date, rollrule, 'm', need_shift=True)