def before_trading_start(context, data):
    """Rank pipeline output by ``sma_rank`` and pick 200 names to short
    (lowest ranks) and 200 to long (highest ranks), then update the universe.

    Fix: ``DataFrame.sort`` was deprecated in pandas 0.17 and removed in
    0.20 — use ``sort_values``. The original also performed the identical
    sort twice; it is now computed once.
    """
    context.output = pipeline_output('example')
    # Single ascending sort; slice both ends of it.
    ranked = context.output.sort_values('sma_rank', ascending=True)
    context.short_list = ranked.iloc[:200]   # lowest-ranked -> shorts
    context.long_list = ranked.iloc[-200:]   # highest-ranked -> longs
    update_universe(context.long_list.index.union(context.short_list.index))
def before_trading_start(context, data):
    """Select the top 200 names by ``sma_short`` as today's universe.

    Fix: ``DataFrame.sort`` was removed in pandas 0.20 — use
    ``sort_values`` instead.
    """
    # Pipeline_output returns the constructed dataframe.
    output = pipeline_output('example')
    # Select and update your universe.
    context.my_universe = output.sort_values('sma_short', ascending=False).iloc[:200]
    update_universe(context.my_universe.index)
def handle_data(context, data):
    """Check that close/volume pipeline columns match expectations for
    assets alive both today and on the prior trading day, and that all
    other assets are absent from the output."""
    closes = pipeline_output('test_close')
    volumes = pipeline_output('test_volume')
    date = get_datetime().normalize()
    prior_session = date - self.trading_day
    for asset in self.assets:
        # Assets should appear iff they exist today and yesterday.
        alive_now = self.exists(date, asset)
        alive_before = self.exists(prior_session, asset)
        if not (alive_now and alive_before):
            self.assertNotIn(asset, closes.index)
            self.assertNotIn(asset, volumes.index)
            continue
        self.assertEqual(
            closes.loc[asset, 'close'],
            self.expected_close(date, asset)
        )
        self.assertEqual(
            volumes.loc[asset, 'volume'],
            self.expected_volume(date, asset)
        )
def before_trading_start(context, data):
    """Build long/short lists from ``sma_rank`` and update the universe.

    Fix: ``DataFrame.sort`` was removed in pandas 0.20 — use
    ``sort_values``. The duplicated sort is also computed once.
    """
    context.output = pipeline_output("example")
    ranked = context.output.sort_values("sma_rank", ascending=True)
    # Set the list of securities to short
    context.short_list = ranked.iloc[:200]
    # Set the list of securities to long
    context.long_list = ranked.iloc[-200:]
    # Update your universe with the SIDs of long and short securities
    update_universe(context.long_list.index.union(context.short_list.index))
def handle_data(context, data):
    """Verify the pipeline reports a close only for assets that existed
    both today and on the previous trading day."""
    results = pipeline_output("test")
    date = get_datetime().normalize()
    prior_session = date - self.trading_day
    for asset in self.assets:
        # Assets should appear iff they exist today and yesterday.
        alive_now = self.exists(date, asset)
        alive_before = self.exists(prior_session, asset)
        if alive_now and alive_before:
            self.assertEqual(
                results.loc[asset, "close"],
                self.expected_close(date, asset),
            )
        else:
            self.assertNotIn(asset, results.index)
def handle_data(context, data):
    """Check the screen/filter column and VWAP values in today's
    pipeline output against independently-computed expectations."""
    today = normalize_date(get_datetime())
    results = pipeline_output("test")
    # AAPL passes only before its split; MSFT never passes; BRK_A always.
    expect_over_300 = {
        AAPL: today < self.AAPL_split_date,
        MSFT: False,
        BRK_A: True,
    }
    for asset in assets:
        expect_pass = expect_over_300[asset]
        if set_screen and not expect_pass:
            # Screened-out assets must be absent from the output entirely.
            self.assertNotIn(asset, results.index)
            continue
        row = results.loc[asset]
        self.assertEqual(row["filter"], expect_pass)
        for length in vwaps:
            got = results.loc[asset, vwap_key(length)]
            want = vwaps[length][asset].loc[today]
            # Only having two places of precision here is a bit
            # unfortunate.
            assert_almost_equal(got, want, decimal=2)
def before_trading_start(context, data):
    """Stash today's pipeline results on the algorithm context."""
    results = pipeline_output('my_pipeline')
    context.pipeline_data = results
def before_trading_start(context, data):
    """Announce that the hook ran, then cache today's pipeline output."""
    print('before trading ran')
    todays_frame = pipeline_output("my_pipeline")
    context.output = todays_frame
def before_trading_start(context, data):
    """Record that the hook fired, then materialize the pipeline."""
    trace.append("BTS call")
    # Result intentionally discarded; only the call itself matters here.
    pipeline_output('my_pipeline')
def before_trading_start(context, data):
    """Pick the three assets with the highest one-year returns."""
    factors = algo.pipeline_output('pipeline')
    yearly = factors["1y_returns"]
    ranked = yearly.sort_values(ascending=False)
    context.winners = ranked.index[:3]
def before_trading_start(context, data):
    """Requesting a pipeline that was never attached must raise, so the
    assertion below should be unreachable."""
    pipeline_output("not_test")
    msg = "Shouldn't make it past pipeline_output!"
    raise AssertionError(msg)
def before_trading_start(context, data):
    """Refresh ``context.pipeline_data`` from the attached pipeline."""
    todays_data = pipeline_output('my_pipeline')
    context.pipeline_data = todays_data
def before_trading_start(context, data):
    """Assert the pipeline produces no rows and count the invocation."""
    context.results = pipeline_output('test')
    self.assertTrue(context.results.empty)
    # Track how many sessions this hook actually ran.
    count[0] += 1
def before_trading_start(context, data):
    """Assert the pipeline produces no rows and count the invocation."""
    context.results = pipeline_output("test")
    assert context.results.empty
    # Track how many sessions this hook actually ran.
    count[0] += 1
def before_trading_start(context, data):
    """Run factor pipeline"""
    factor_frame = pipeline_output('factor_pipeline')
    context.factor_data = factor_frame
    # Record the ranking column and current prices for the universe.
    record(factor_data=factor_frame.ranking)
    universe = factor_frame.index
    record(prices=data.current(universe, 'price'))
def run_active_pipe(context):
    """Return today's output of the 'factor_pipe' pipeline."""
    result = pipeline_output('factor_pipe')
    return result
def before_trading_start(context, data):
    """Cache the pipeline output and derive today's security list."""
    frame = pipeline_output('top_dollar_volume')
    context.pipe_output = frame
    context.security_list = frame.index
def rebalance(context, data):
    """Dump the current simulation time and the pipeline frame."""
    frame = pipeline_output('pipeline')
    print(get_datetime())
    print(frame)
def before_trading_start(context, data):
    """
    Called every day before market open.
    """
    todays_output = pipeline_output('my_pipeline')
    context.output = todays_output
    print(context.output)
def before_trading_start(context, data):
    """Fetch our pipeline output every day before the open."""
    context.output = pipeline_output('my_pipeline')
def initialize(context):
    """pipeline_output is not legal inside initialize, so the call below
    must raise before reaching the assertion."""
    attach_pipeline(Pipeline(), 'test')
    pipeline_output('test')
    msg = "Shouldn't make it past pipeline_output()"
    raise AssertionError(msg)
def before_trading_start(context, data):
    """Requesting a pipeline that was never attached must raise, so the
    assertion below should be unreachable."""
    pipeline_output('not_test')
    msg = "Shouldn't make it past pipeline_output!"
    raise AssertionError(msg)
def before_trading_start(context, data):
    """Fetch the full pipeline frame and alias it as today's output."""
    frame = pipeline_output('data_pipeline')
    context.all_assets = frame
    context.output = frame
def rebalance(context, data):
    """
    Execute orders according to our schedule_function() timing.

    Ranks the pipeline output, keeps the top 20 by (overall_rank,
    momentum), liquidates the bond position and any equity that fell out
    of the new portfolio, then equal-weights into names whose
    ``trend_filter`` is set.

    Fixes applied:
    - ``logger.level is logging.DEBUG`` compared ints by *identity*; it
      only worked because CPython caches small ints. Use ``==``.
    - ``equity is algo.sid(...)`` compared assets by identity; use ``==``
      so equal-but-distinct asset objects are still skipped.
    - ``row.trend_filter is True`` is always False when pandas hands back
      ``numpy.bool_`` values; compare with ``== True`` instead.
    """
    logger.debug('rebalancing on: %s', algo.get_datetime())

    context.trend_filter = False

    # Drop unranked rows, then order by momentum (descending).
    new_portfolio = algo.pipeline_output('pipeline').dropna(
        subset=['overall_rank']).sort_values('momentum', ascending=False)

    for equity, row in new_portfolio.iterrows():
        logger.debug('new portfolio (before filtering) - equity: %s', equity)

    # Keep the 20 best by overall_rank, tie-broken by momentum.
    # NOTE: ordering the keys as ['momentum', 'overall_rank'] was an
    # alternative that backtested differently.
    new_portfolio = new_portfolio.nlargest(
        20, ['overall_rank', 'momentum'])

    if logger.level == logging.DEBUG:
        for equity, row in new_portfolio.iterrows():
            logger.debug(
                'new portfolio - (after filtering) equity: %s', equity)

    logger.info('len existing portfolio: %s', len(context.portfolio.positions))

    if logger.level == logging.DEBUG:
        for equity, values in context.portfolio.positions.items():
            logger.debug(
                'context.portfolio.positions - equity: %s, amount: %s, cost_basis: %s, sold_on: %s, sold_at_price: %s',
                equity, values.amount, values.cost_basis, values.last_sale_date, values.last_sale_price)

    # Liquidate the bond position before rotating into equities.
    order_target(algo.sid('FIBBG000NTFYM5'), 0)
    logger.debug('selling all bonds')

    # Eject current positions that are not in the new portfolio
    # (the bond was handled above).
    keep = set(new_portfolio.index.tolist())  # built once, O(1) lookups
    for equity in context.portfolio.positions:
        if equity == algo.sid('FIBBG000NTFYM5'):
            continue
        if equity not in keep:
            order_target_percent(equity, 0)

    # Equal weight over whichever set is larger, so we never over-allocate
    # while old positions are still being unwound.
    stock_weights = 1.0 / max(len(context.portfolio.positions),
                              len(new_portfolio.index))

    logger.debug('len existing portfolio (afer ejection): %s',
                 len(context.portfolio.positions))
    logger.debug('len new portfolio: %s', len(new_portfolio.index))
    logger.debug('stock_weights: %s', stock_weights)

    for equity, row in new_portfolio.iterrows():
        # '==' (not 'is'): pandas may yield numpy.bool_, for which
        # 'is True' is always False and no order would ever be placed.
        if row.trend_filter == True:  # noqa: E712
            context.trend_filter = True
            order_target_percent(equity, stock_weights)
        else:
            context.trend_filter = False

    logger.debug('cash: %s', context.portfolio.cash)
    logger.debug('portfolio_value: %s', context.portfolio.portfolio_value)
    logger.debug('num_positions: %s', len(context.portfolio.positions))
    logger.debug('positions: %s', context.portfolio.positions)
def initialize(context):
    """pipeline_output is not legal inside initialize, so the call below
    must raise before reaching the assertion."""
    attach_pipeline(Pipeline(), "test")
    pipeline_output("test")
    msg = "Shouldn't make it past pipeline_output()"
    raise AssertionError(msg)
def before_trading_start(context, data):
    # NOTE(review): schedule_function is normally only valid inside
    # initialize(); calling it here re-registers the rule every session and
    # may raise in stock zipline — confirm against the framework version in
    # use before relying on this.
    schedule_function(rebalance, date_rules.every_day())
    # Cache today's pipeline results for the scheduled rebalance to consume.
    context.pipeline_data = pipeline_output('my_pipeline')
def before_trading_start(context, data):
    """Daily pre-open hook: reset per-day flags, refresh the pipeline,
    filter out assets whose symbols IB cannot route, drop auto-closed
    assets, and (when live) reconcile stale zero-amount zipline positions.

    Fix applied: ``x in c.pipeline_data`` tested DataFrame *column*
    labels, so auto-close assets were never actually dropped; the test is
    now against the row index (rows are keyed by asset).
    """
    c = context
    update_portfolio_auto_close(c, data)
    c.ORDERS_DONE = False        # No Orders done yet today
    c.REBALANCE_DONE = False     # No rebalance done yet today
    c.MINUTES_TO_REBAL = MINUTES_TO_REBAL
    c.all_orders = {}

    ### ajjc live
    # NOTE(review): schedule_function is normally called from initialize();
    # confirm the live broker shim tolerates re-registration here each day.
    if IS_LIVE:
        for i in range(1, 391):
            # Daily
            schedule_function(rebalance, date_rules.every_day(),
                              time_rules.market_open(minutes=i))
            # Weekly
            schedule_function(func=rebalance,
                              date_rule=date_rules.every_day(),
                              time_rule=time_rules.market_open(minutes=i),
                              half_days=True)

    current_time = context.get_datetime('US/Eastern')
    if DEBUG:
        log.debug(
            'Time:before_trading_start:US/Eastern {}'.format(current_time))

    df_pre = pipeline_output('pipeline')
    if not df_pre.empty:
        # Drop Sharadar assets with '.' or '-' in the symbol, as IB does
        # not support that naming.
        df_pre = df_pre.reset_index()
        num_full_assets = len(df_pre)
        df_pre = df_pre[df_pre['index'].map(
            lambda x: len(str(x.symbol).split('.')) == 1)].set_index('index')
        print("NumAssetsDropped={}".format(num_full_assets - len(df_pre)))

        # Filter out incompatible assets (extend this later via FIGGY
        # mappings): symbols containing '-' or '.' are wrong IB formats;
        # exchange != '0' rows come from SF1 (funds/ETFs/indexes).
        to_drop = []
        for row in df_pre.index:
            if (row.symbol.split('-')[0] == row.symbol) and (row.symbol.split('.')[0] == row.symbol) and (row.exchange == '0'):
                print("Keep Asset:{} exchange:{}".format(row, row.exchange))
            else:
                print("Remove Asset:{} exchange:{}".format(row, row.exchange))
                to_drop.append(row)
        if to_drop:
            df_pre.drop(to_drop, inplace=True)

    c.pipeline_data = df_pre

    if len(c.auto_close) > 0:
        for x in c.auto_close:
            # BUGFIX: membership must be tested on the index; 'x in
            # c.pipeline_data' checked column labels and never matched.
            if x in c.pipeline_data.index:
                c.pipeline_data.drop(x, inplace=True)
                print("DroppedFromPipeline:InAutoClose:[{}] Date:{}".format(
                    x, c.get_datetime()))

    print("BeforeTrStrt:", c.pipeline_data)

    log.info("BTS___CurrZiplinPosBef: {}".format(context.portfolio.positions))
    if IS_LIVE:
        log.info("BTS___CurrBrokerPosCur: {}".format(context.broker.positions))
        # ajjc: zlb: BUG: clean out null (zero-amount) portfolio entries;
        # handle this generically in zipline-broker in some way.
        for x in list(context.portfolio.positions):
            if context.portfolio.positions[x].amount == 0:
                del context.portfolio.positions[x]
        log.info("BTS___CurrZiplinPosAft: {}".format(
            context.portfolio.positions))
def before_trading_start(context, data):
    """Assert the pipeline produces no rows and count the invocation."""
    context.results = pipeline_output("test")
    self.assertTrue(context.results.empty)
    # Track how many sessions this hook actually ran.
    count[0] += 1
def before_trading_start(context, data):
    """Select the 100 names with the highest ``ma_ratio`` as the universe.

    Fix: ``DataFrame.sort`` was removed in pandas 0.20 — use
    ``sort_values`` instead.
    """
    output = pipeline_output('pipeline_tutorial')
    context.my_universe = output.sort_values(
        'ma_ratio', ascending=False).iloc[:100]