def minute_get_bars(days):
    """Return the number of minute bars spanning the trailing ``days``
    trading days, ending at the current simulation minute.

    Results are memoized per bar on the class: the cache is rebuilt
    whenever the algorithm's datetime advances.
    """
    klass = self.__class__
    current_dt = get_algo_instance().datetime
    # A new simulation minute invalidates every cached bar count.
    if current_dt != klass._minute_bar_cache_dt:
        klass._minute_bar_cache_dt = current_dt
        klass._minute_bar_cache = {}
    try:
        return klass._minute_bar_cache[days]
    except KeyError:
        pass
    environment = get_algo_instance().trading_environment
    prior_day = environment.previous_trading_day(current_dt)
    sessions = environment.days_in_range(
        environment.add_trading_days(-days + 2, prior_day),
        prior_day,
    )
    # Minutes contributed by the (days - 1) completed sessions:
    # an early close has 210 trading minutes, a full day has 390.
    minute_count = sum(
        210 if session in environment.early_closes else 390
        for session in sessions
    )
    # Minutes elapsed so far in today's session.
    session_open = environment.get_open_and_close(current_dt)[0]
    minute_count += int((current_dt - session_open).total_seconds() / 60)
    # The current, in-progress minute counts as a bar too.
    klass._minute_bar_cache[days] = minute_count + 1
    return klass._minute_bar_cache[days]
def minute_get_bars(days):
    """Number of minute bars covering the last ``days`` trading days up
    to and including the current simulation minute.

    The per-``days`` answer is cached on the class; the whole cache is
    discarded each time the simulation clock ticks forward.
    """
    cache_owner = self.__class__
    dt = get_algo_instance().datetime
    if dt != cache_owner._minute_bar_cache_dt:
        # The clock moved: all previously cached counts are stale.
        cache_owner._minute_bar_cache_dt = dt
        cache_owner._minute_bar_cache = {}
    if days in cache_owner._minute_bar_cache:
        return cache_owner._minute_bar_cache[days]
    env = get_algo_instance().trading_environment
    yesterday = env.previous_trading_day(dt)
    past_days = env.days_in_range(
        env.add_trading_days(-days + 2, yesterday),
        yesterday,
    )
    # 210 minutes in an early-close session, 390 in a regular one; this
    # covers the (days - 1) sessions before today.
    total = 0
    for day in past_days:
        total += 210 if day in env.early_closes else 390
    # Add the minutes elapsed in the current session.
    market_open = env.get_open_and_close(dt)[0]
    total += int((dt - market_open).total_seconds() / 60)
    # Plus one for the minute currently in progress.
    cache_owner._minute_bar_cache[days] = total + 1
    return cache_owner._minute_bar_cache[days]
def downsample_panel(minute_rp, daily_rp, mkt_close):
    """
    @minute_rp is a rolling panel, which should have minutely rows
    @daily_rp is a rolling panel, which should have daily rows
    @mkt_close is the market close whose trading day the new bar covers

    Using the history in minute_rp, a new daily bar is created by
    downsampling. The data from the daily bar is then added to the
    daily rolling panel using add_frame.
    """
    cur_panel = minute_rp.get_current()
    sids = minute_rp.minor_axis
    day_frame = pd.DataFrame(columns=sids, index=cur_panel.items)
    env = get_algo_instance().trading_environment
    dt1 = env.normalize_date(mkt_close)
    dt2 = env.next_trading_day(mkt_close)
    by_close = functools.partial(get_date, mkt_close, dt1, dt2)
    for item in minute_rp.items:
        frame = cur_panel[item]
        func = get_sample_func(item)
        # group by trading day, using the market close of the current
        # day. If events occurred after the last close (yesterday) but
        # before today's close, group them into today.
        # NOTE: by_close is passed directly; a lambda wrapper around a
        # single-argument callable is redundant for groupby.
        dframe = frame.groupby(by_close).agg(func)
        for stock in sids:
            day_frame[stock][item] = dframe[stock].ix[dt1]
    # store the frame at midnight instead of the close
    daily_rp.add_frame(dt1, day_frame)
def _get_buffer(self, bars, field='price'):
    """
    Gets the result of history for the given number of bars and field.

    This will cache the results internally, keyed by field, on the class.
    """
    cls = self.__class__
    algo = get_algo_instance()
    now = algo.datetime
    if now != cls._history_cache_dt:
        # For a given dt, the history call for this field will not change.
        # We have a new dt, so we should reset the cache.
        cls._history_cache_dt = now
        cls._history_cache = {}
    # FIX: the original mixed `self._history_cache` and
    # `cls._history_cache`; it only worked because the instance never
    # shadows the class attribute. Use `cls` consistently so an instance
    # attribute can never silently split the cache.
    if field not in cls._history_cache \
            or bars > len(cls._history_cache[field].index):
        # If we have never cached this field OR the amount of bars that we
        # need for this field is greater than the amount we have cached,
        # then we need to get more history.
        hst = algo.history(
            bars, self._freqstr, field, ffill=True,
        )
        # Assert that the columns hold ints, not security objects.
        if not isinstance(self._sid, str):
            hst.columns = hst.columns.astype(int)
        cls._history_cache[field] = hst
    # Slice off only the bars needed. This is because we store the LARGEST
    # amount of history for the field, and we might request less than the
    # largest from the cache.
    return cls._history_cache[field][self._sid][-bars:]
def factors(self):
    """Return today's row of the factor matrix, recomputing the matrix
    when the cached one has expired.

    Returns an empty DataFrame (instead of raising KeyError) when no
    assets passed the filters on the current day, so callers can iterate
    the result unconditionally.
    """
    algo = get_algo_instance()
    today = normalize_date(algo.get_datetime())
    if today > self._factor_matrix_expires:
        self._factor_matrix, self._factor_matrix_expires = \
            algo.compute_factor_matrix(today)
    try:
        return self._factor_matrix.loc[today]
    except KeyError:
        # This happens if no assets passed our filters on a given day;
        # `today` is then absent from the matrix index.
        return pd.DataFrame(
            index=[],
            columns=self._factor_matrix.columns,
        )
def wrapped(*args, **kwargs):
    """Dispatch to the like-named method on the active algorithm.

    Raises RuntimeError when called outside a running simulation.
    """
    algo = get_algo_instance()
    if algo is None:
        raise RuntimeError(
            'zipline api method %s must be called during a simulation.'
            % f.__name__)
    method = getattr(algo, f.__name__)
    return method(*args, **kwargs)
def wrapped(*args, **kwargs):
    """Forward the call to the current algorithm instance's method of
    the same name; error out if no simulation is running."""
    instance = get_algo_instance()
    if instance is not None:
        return getattr(instance, f.__name__)(*args, **kwargs)
    raise RuntimeError(
        'zipline api method %s must be called during a simulation.'
        % f.__name__
    )
def returns(self):
    """Percent change between the previous close and the latest price
    for this sid, computed from a two-bar daily history window that is
    cached per simulation bar."""
    algo = get_algo_instance()
    current_dt = algo.datetime
    if current_dt != self._returns_cache_dt:
        # New bar: refresh the two-day price history.
        self._returns_cache_dt = current_dt
        self._returns_cache = algo.history(2, '1d', 'price', ffill=True)
    series = self._returns_cache[self._sid]
    first, last = series.iloc[0], series.iloc[-1]
    return (last - first) / first
def _get_bars(self, days):
    """
    Gets the number of bars needed for the current number of days.

    Figures this out based on the algo datafrequency and caches the
    result by replacing this function on the object: after the first
    call, ``self._get_bars`` points to a frequency-specific function.
    """
    def daily_get_bars(days):
        # One bar per day in daily mode.
        return days

    @with_environment()
    def minute_get_bars(days, env=None):
        klass = self.__class__
        current_dt = get_algo_instance().datetime
        if current_dt != klass._minute_bar_cache_dt:
            # A new bar invalidates every cached count.
            klass._minute_bar_cache_dt = current_dt
            klass._minute_bar_cache = {}
        if days not in klass._minute_bar_cache:
            # Computed once per bar and shared by every transform that
            # asks for the same number of days.
            yesterday = env.previous_trading_day(current_dt)
            sessions = env.days_in_range(
                env.add_trading_days(-days + 2, yesterday),
                yesterday,
            )
            # 210 minutes in an early-close session, 390 in a full day;
            # this covers the (days - 1) sessions before today.
            minutes = sum(
                210 if session in env.early_closes else 390
                for session in sessions
            )
            # Minutes elapsed so far in today's session.
            open_dt = env.get_open_and_close(current_dt)[0]
            minutes += int((current_dt - open_dt).total_seconds() / 60)
            # Plus one for the minute currently in progress.
            klass._minute_bar_cache[days] = minutes + 1
        return klass._minute_bar_cache[days]

    if get_algo_instance().sim_params.data_frequency == 'daily':
        self._freqstr = '1d'
        # Update this method to point to the daily variant.
        self._get_bars = daily_get_bars
    else:
        self._freqstr = '1m'
        # Update this method to point to the minute variant.
        self._get_bars = minute_get_bars
    # Not actually recursive because we have already cached the new method.
    return self._get_bars(days)
def _get_bars(self, days):
    """
    Gets the number of bars needed for the current number of days.

    Works out the answer from the algo's data frequency and then caches
    it by rebinding ``self._get_bars`` to the matching helper, so later
    calls skip this dispatch entirely.
    """
    def daily_bars(days):
        return days

    @with_environment()
    def minute_bars(days, env=None):
        cls = self.__class__
        dt = get_algo_instance().datetime
        if dt != cls._minute_bar_cache_dt:
            # The clock advanced; drop all cached counts.
            cls._minute_bar_cache_dt = dt
            cls._minute_bar_cache = {}
        cached = cls._minute_bar_cache.get(days)
        if cached is not None:
            return cached
        # Compute once per bar, even if several transforms request the
        # same number of days.
        prior = env.previous_trading_day(dt)
        trading_days = env.days_in_range(
            env.add_trading_days(-days + 2, prior),
            prior,
        )
        # Sum minutes over the (days - 1) sessions before today:
        # 210 for an early close, 390 for a full session.
        total = 0
        for day in trading_days:
            total += 210 if day in env.early_closes else 390
        # Add the minutes elapsed in today's session, plus one for the
        # minute in progress.
        session_open = env.get_open_and_close(dt)[0]
        total += int((dt - session_open).total_seconds() / 60)
        result = total + 1
        cls._minute_bar_cache[days] = result
        return result

    if get_algo_instance().sim_params.data_frequency == 'daily':
        # Rebind to the daily variant.
        self._freqstr, self._get_bars = '1d', daily_bars
    else:
        # Rebind to the minute variant.
        self._freqstr, self._get_bars = '1m', minute_bars
    # Not recursive: the method was just replaced above.
    return self._get_bars(days)
def factors(self):
    """Today's slice of the factor matrix.

    Rebuilds the matrix when the cached one has expired and returns an
    empty frame when no assets passed the filters today.
    """
    algo = get_algo_instance()
    today = normalize_date(algo.get_datetime())
    if today > self._factor_matrix_expires:
        (self._factor_matrix,
         self._factor_matrix_expires) = algo.compute_factor_matrix(today)
    try:
        return self._factor_matrix.loc[today]
    except KeyError:
        # No assets passed our filters on this day.
        return pd.DataFrame(index=[], columns=self._factor_matrix.columns)
def _append_to_window(self, event):
    """Add one event's frame to the rolling window and update the
    trading-day bookkeeping (market close, day count, fullness)."""
    self.field_names = self._get_field_names(event)

    # With no static universe, mask columns to the sids present in this
    # event; otherwise always use the fixed set. The panel handed to the
    # transform is column-masked with this set (see get_data), and the
    # underlying panel grows monotonically if the sids change over time.
    sids = (set(event.data.keys())
            if self.static_sids is None
            else self.static_sids)
    self.latest_sids = sids

    # Lazily create the rolling panel(s) on the first event.
    if self.rolling_panel is None:
        self._init_panels(sids)

    frame = pd.DataFrame(event.data, index=self.field_names, columns=sids)
    self.rolling_panel.add_frame(event.dt, frame)

    # Events may come from non-trading sources on non-trading days; only
    # trading days count toward market-close and day bookkeeping.
    env = get_algo_instance().trading_environment
    if env.is_trading_day(event.dt):
        _, mkt_close = env.get_open_and_close(event.dt)
        if self.bars == 'daily':
            # Daily bars have their dt set to midnight.
            mkt_close = env.normalize_date(mkt_close)
        if event.dt == mkt_close:
            if self.downsample:
                downsample_panel(self.rolling_panel,
                                 self.daily_rolling_panel,
                                 mkt_close)
            self.trading_days_total += 1
        self.mkt_close = mkt_close

    self.last_dt = event.dt
    if self.trading_days_total >= self.window_length:
        self.full = True
def _append_to_window(self, event):
    """Record ``event`` in the rolling window, then advance the
    trading-day counters when the event lands on a market close."""
    self.field_names = self._get_field_names(event)

    if self.static_sids is not None:
        current_sids = self.static_sids
    else:
        current_sids = set(event.data.keys())
    # The panel handed to the transform is column-masked with this set,
    # guaranteeing that exactly these sids reach the algorithm's
    # handle_data (see get_data). The backing panel only ever grows as
    # the sids change over time.
    self.latest_sids = current_sids

    if self.rolling_panel is None:
        # First event: build the panel(s) now that we know the sids.
        self._init_panels(current_sids)

    self.rolling_panel.add_frame(
        event.dt,
        pd.DataFrame(event.data,
                     index=self.field_names,
                     columns=current_sids))

    # Only trading days participate in close/day bookkeeping; events can
    # arrive from non-trading sources on non-trading days.
    env = get_algo_instance().trading_environment
    if env.is_trading_day(event.dt):
        mkt_close = env.get_open_and_close(event.dt)[1]
        if self.bars == 'daily':
            # Daily bars carry midnight timestamps.
            mkt_close = env.normalize_date(mkt_close)
        if event.dt == mkt_close:
            if self.downsample:
                downsample_panel(self.rolling_panel,
                                 self.daily_rolling_panel,
                                 mkt_close)
            self.trading_days_total += 1
        self.mkt_close = mkt_close

    self.last_dt = event.dt
    if self.trading_days_total >= self.window_length:
        self.full = True
def _get_buffer(self, bars, field='price', raw=False):
    """
    Gets the result of history for the given number of bars and field.

    This will cache the results internally. When ``raw`` is True, the
    sid's column is returned as a raw ndarray slice instead of a Series.
    """
    cls = self.__class__
    algo = get_algo_instance()
    now = algo.datetime
    if now != cls._history_cache_dt:
        # For a given dt, the history call for this field will not change.
        # We have a new dt, so we should reset the cache.
        cls._history_cache_dt = now
        cls._history_cache = {}
    # FIX: the original mixed `self._history_cache` and
    # `cls._history_cache`; it only worked because the instance never
    # shadows the class attribute. Use `cls` consistently so an instance
    # attribute can never silently split the cache.
    if field not in cls._history_cache \
            or bars > len(cls._history_cache[field][0].index):
        # If we have never cached this field OR the amount of bars that we
        # need for this field is greater than the amount we have cached,
        # then we need to get more history.
        hst = algo.history(
            bars, self._freqstr, field, ffill=True,
        )
        # Assert that the columns hold ints, not security objects.
        if not isinstance(self._sid, str):
            hst.columns = hst.columns.astype(int)
        # Cache the frame together with its raw ndarray and column index
        # so the raw path avoids per-call DataFrame overhead.
        cls._history_cache[field] = (hst, hst.values, hst.columns)
    # Slice off only the bars needed. This is because we store the LARGEST
    # amount of history for the field, and we might request less than the
    # largest from the cache.
    buffer_, values, columns = cls._history_cache[field]
    if raw:
        sid_index = columns.get_loc(self._sid)
        return values[-bars:, sid_index]
    else:
        return buffer_[self._sid][-bars:]
def _cache_daily_minutely(self, days, fn):
    """
    Gets the number of bars needed for the current number of days.

    Figures this out based on the algo datafrequency, then caches the
    result by rebinding ``_get_bars`` / ``_get_max_bars`` on the object:
    after the first call these attributes point at frequency-specific
    functions.
    """
    def daily_get_max_bars(days):
        return days

    def minute_get_max_bars(days):
        # Upper bound on minutes: assume every day is a full 390-minute
        # session, regardless of short sessions or the current day.
        return days * 390

    def daily_get_bars(days):
        return days

    def minute_get_bars(days):
        klass = self.__class__
        dt = get_algo_instance().datetime
        if dt != klass._minute_bar_cache_dt:
            # New bar: throw away every cached count.
            klass._minute_bar_cache_dt = dt
            klass._minute_bar_cache = {}
        if days not in klass._minute_bar_cache:
            # Computed once per bar, shared across transforms that ask
            # for the same number of days.
            env = get_algo_instance().trading_environment
            prior = env.previous_trading_day(dt)
            sessions = env.days_in_range(
                env.add_trading_days(-days + 2, prior),
                prior,
            )
            # 210 minutes in an early-close session, 390 in a full day;
            # this covers the (days - 1) sessions before today.
            minutes = sum(
                210 if session in env.early_closes else 390
                for session in sessions
            )
            # Add the minutes elapsed in today's session ...
            session_open = env.get_open_and_close(dt)[0]
            minutes += int((dt - session_open).total_seconds() / 60)
            # ... plus one for the minute currently in progress.
            klass._minute_bar_cache[days] = minutes + 1
        return klass._minute_bar_cache[days]

    if get_algo_instance().sim_params.data_frequency == 'daily':
        self._freqstr = '1d'
        # Swap in the daily variants.
        self._get_bars = daily_get_bars
        self._get_max_bars = daily_get_max_bars
    else:
        self._freqstr = '1m'
        # Swap in the minute variants.
        self._get_bars = minute_get_bars
        self._get_max_bars = minute_get_max_bars

    # NOTE: This silently adds these two entries to the `__dict__`
    # without affecting the `__len__` of the object. This is important
    # because we use the `len` of the `SIDData` object to see if we have
    # data for this asset.
    self._initial_len += 2

    # Not actually recursive because we have already cached the new method.
    return getattr(self, fn)(days)
def _cache_daily_minutely(self, days, fn):
    """
    Gets the number of bars needed for the current number of days.

    The answer depends only on the algo's data frequency, so this method
    rebinds ``_get_bars`` and ``_get_max_bars`` to the matching helpers;
    subsequent calls go straight to those functions.
    """
    def bars_for_daily(days):
        return days

    def max_bars_for_daily(days):
        return days

    def max_bars_for_minute(days):
        # Worst case: every day is a full 390-minute session, ignoring
        # short sessions and the current day.
        return days * 390

    def bars_for_minute(days):
        cls = self.__class__
        now = get_algo_instance().datetime
        if now != cls._minute_bar_cache_dt:
            # The clock advanced; all cached counts are stale.
            cls._minute_bar_cache_dt = now
            cls._minute_bar_cache = {}
        cached = cls._minute_bar_cache.get(days)
        if cached is not None:
            return cached
        # Compute once per bar, even when multiple transforms use the
        # same number of days.
        env = get_algo_instance().trading_environment
        prev = env.previous_trading_day(now)
        completed = env.days_in_range(
            env.add_trading_days(-days + 2, prev),
            prev,
        )
        # Sum minutes over the (days - 1) completed sessions:
        # 210 for an early close, 390 for a regular day.
        count = 0
        for day in completed:
            count += 210 if day in env.early_closes else 390
        # Minutes elapsed today, plus one for the minute in progress.
        open_dt = env.get_open_and_close(now)[0]
        count += int((now - open_dt).total_seconds() / 60)
        result = count + 1
        cls._minute_bar_cache[days] = result
        return result

    if get_algo_instance().sim_params.data_frequency == 'daily':
        self._freqstr = '1d'
        # Rebind to the daily variants.
        self._get_bars = bars_for_daily
        self._get_max_bars = max_bars_for_daily
    else:
        self._freqstr = '1m'
        # Rebind to the minute variants.
        self._get_bars = bars_for_minute
        self._get_max_bars = max_bars_for_minute

    # NOTE: This silently adds these two entries to the `__dict__`
    # without affecting the `__len__` of the object. This is important
    # because we use the `len` of the `SIDData` object to see if we have
    # data for this asset.
    self._initial_len += 2

    # Not actually recursive: the attribute was just rebound above.
    return getattr(self, fn)(days)
def wrapped(*args, **kwargs):
    """Relay the call to the method of the same name on the currently
    active algorithm instance."""
    algo = get_algo_instance()
    target = getattr(algo, f.__name__)
    return target(*args, **kwargs)
def __enter__(self):
    """Install this context's algo instance as the active one, saving
    whatever instance was previously active so __exit__ can restore it."""
    previous = get_algo_instance()
    self.old_algo_instance = previous
    set_algo_instance(self.algo_instance)