Example #1
    def _on_data_update(cls, data) -> None:
        """
        Adds the new candle to the queues, which AbstractAccount instances process on other threads.
        """

        # Get candle's symbol
        symbol = data.symbol

        # Convert the candle's pandas Timestamp to a naive, second-resolution datetime
        moment = datetime.strptime(data.start.strftime(DATE_TIME_FORMAT),
                                   DATE_TIME_FORMAT)

        # Compile candle object
        candle = Candle(moment=moment,
                        open=data.open,
                        high=data.high,
                        low=data.low,
                        close=data.close,
                        volume=data.volume)

        # Signal the running strategy to respond to this new price data
        cls._queue_update(moment=moment,
                          update_type=StreamUpdateType.CANDLE,
                          symbol=symbol,
                          candle=candle.to_json())
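
The strftime/strptime round-trip above is what strips the pandas Timestamp down to a plain datetime; a minimal sketch of the effect, assuming DATE_TIME_FORMAT has second resolution (the real constant is defined elsewhere in the repo):

import pandas as pd
from datetime import datetime

DATE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S'  # assumed value, for illustration only

ts = pd.Timestamp('2021-03-05 10:30:00.123456-05:00')
moment = datetime.strptime(ts.strftime(DATE_TIME_FORMAT), DATE_TIME_FORMAT)
print(repr(moment))  # datetime.datetime(2021, 3, 5, 10, 30) -- naive, second resolution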
Example #2
 def get_candle(self) -> Candle:
     """
     Returns this update's candle.
     Only for StreamUpdateType.CANDLE.
     """
     return None if self.raw_data['candle'] is None else Candle.from_json(
         self.raw_data['candle'])
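
A hypothetical consumer of such an update, assuming the update object also carries update_type and symbol fields (not shown in this snippet):

if update.update_type == StreamUpdateType.CANDLE:
    candle = update.get_candle()
    if candle is not None:
        account.process_candle(update.symbol, candle)  # hypothetical handler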
Example #3
 def from_json(cls, data: Dict[str, Any]) -> Optional['SymbolDay']:
     """Converts the json dictionary into a SymbolDay object."""
     try:
         candles = [
             Candle.from_json(candle_json)
             for candle_json in data['candles']
         ]
         return SymbolDay(data['symbol'], data['day_date'], candles)
     except Exception:
         traceback.print_exc()
         return None
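
Candle carries a matching to_json() (used in Example #1), so a round-trip through from_json might look like the following; the exact representation of 'day_date' in the dict is an assumption:

from datetime import date

raw = {
    'symbol': 'SPY',
    'day_date': date(2021, 3, 5),  # assumed representation of the stored day
    'candles': [candle.to_json() for candle in candles],  # candles: List[Candle]
}
day = SymbolDay.from_json(raw)  # None if anything above is malformed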
Example #4
    def feed_model(self, day_data: SymbolDay) -> None:
        """
        Calculates the strongest dip during the first 45 minutes of CycleStrategy's typical run window,
        i.e. the worst dip that should be expected within 45 minutes of buying the symbol.
        """
        # Find price at minute 60
        start_candle: Candle = day_data.get_candle_at_sec(
            datetime.combine(day_data.day_date, time(hour=10, minute=30)))
        if start_candle is None:
            self.warn_process(
                "Couldn't update dip_45 analysis_model for CycleStrategy. Bad data at minute 60."
            )
            return

        # Find lowest price within 45 minutes after minute 60
        start_time = datetime.combine(day_data.day_date,
                                      time(hour=10, minute=30))
        end_time = datetime.combine(day_data.day_date, time(
            hour=10, minute=30)) + timedelta(minutes=45)
        lowest_candle: Candle = Candle(start_candle.moment, start_candle.open,
                                       start_candle.high, start_candle.low,
                                       start_candle.close, start_candle.volume)
        for candle in day_data.candles:
            if candle.low < lowest_candle.low and start_time < candle.moment < end_time:
                lowest_candle = candle

        # Calculate the greatest downward price change as a percentage
        strongest_dip_pct = 100.0 * max(
            0.0, start_candle.low - lowest_candle.low) / start_candle.low

        # Load the current running sum
        current_sum = self.redis().get_analysis_rolling_sum(
            day_data.symbol, self.model_type)

        # Skip days that don't dip since this model is only interested in forecasting dips
        if strongest_dip_pct == 0:
            output = current_sum

        # Merge this day into the running sum
        else:
            output = RollingSumFormulas.combine(
                current_sum, strongest_dip_pct,
                RollingSumFormulas.get_30_day_weight())

        # Save model's output
        self.save_output(symbol=day_data.symbol,
                         raw_output=output,
                         day_date=day_data.day_date)
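
RollingSumFormulas is defined elsewhere; a hypothetical stand-in consistent with how it is called above, assuming an exponential-moving-average-style blend (the real formula may differ):

class RollingSumFormulas:
    @staticmethod
    def get_30_day_weight() -> float:
        # Assumed: smoothing factor chosen so a sample's influence decays over ~30 days
        return 2.0 / (30 + 1)

    @staticmethod
    def combine(current_sum: float, new_value: float, weight: float) -> float:
        # Blend the new observation into the running value
        return current_sum + weight * (new_value - current_sum)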
Example #5
 def __init__(self,
              trendline_candles: List[Candle]):
     """
     Fits regression lines through local minima and maxima of prices during the trendline period.
     """
     self.candles = trendline_candles
     try:
         self.local_minima, self.local_maxima = find_mins_maxs(trendline_candles)
         self.minima_regression = SimpleLinearRegression(self.local_minima)
         self.maxima_regression = SimpleLinearRegression(self.local_maxima)
     except Exception:
         traceback.print_exc()
         # Fall back to two flat dummy candles one second apart so the
         # regressions can still be fit over at least two points.
         try:
             moment_1 = datetime.combine(START_DATE, OPEN_TIME) + timedelta(seconds=1)
             moment_2 = datetime.combine(START_DATE, OPEN_TIME) + timedelta(seconds=2)
             candle_1 = Candle(moment_1, 0, 0, 0, 0, 1)
             candle_2 = Candle(moment_2, 0, 0, 0, 0, 1)
             self.candles = [candle_1, candle_2]
             self.local_minima = [candle_1, candle_2]
             self.local_maxima = [candle_1, candle_2]
             self.minima_regression = SimpleLinearRegression([candle_1, candle_2])
             self.maxima_regression = SimpleLinearRegression([candle_1, candle_2])
         except Exception:
             traceback.print_exc()
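
SimpleLinearRegression is also defined elsewhere; a hypothetical ordinary-least-squares stand-in over (seconds since the first candle, close price) points, which also shows why the fallback above builds two candles one second apart: the fit needs a nonzero x-variance.

from typing import List

class SimpleLinearRegression:
    def __init__(self, candles: List['Candle']):
        # x = seconds since the first candle; y = close price
        xs = [(c.moment - candles[0].moment).total_seconds() for c in candles]
        ys = [c.close for c in candles]
        n = len(xs)
        mean_x, mean_y = sum(xs) / n, sum(ys) / n
        var_x = sum((x - mean_x) ** 2 for x in xs)
        self.slope = (sum((x - mean_x) * (y - mean_y)
                          for x, y in zip(xs, ys)) / var_x) if var_x else 0.0
        self.intercept = mean_y - self.slope * mean_x

    def predict(self, seconds_since_start: float) -> float:
        return self.intercept + self.slope * seconds_since_start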
Example #6
    def get_cached_candles(self, symbol: str, day_date: date) -> List[Candle]:
        """
        Gets candles cached from polygon stream.
        """

        # Get raw candle data from redis; lrange returns an empty list when the key is missing.
        candles_data = self.client.lrange(self.get_prefix() + 'STREAM-CANDLES_' + symbol, 0, -1)
        if not candles_data:
            return []

        # Decode candles.
        candles_data = [candle_str.decode("utf-8") for candle_str in candles_data]
        candles = [Candle.from_str(candle_str) for candle_str in candles_data]
        candles.sort(key=lambda candle_to_sort: candle_to_sort.moment)

        # Filter out candles from other days.
        return [candle for candle in candles if candle.moment.date() == day_date]
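
The reader above implies a writer that appends string-encoded candles to the same per-symbol Redis list; a minimal sketch, assuming str(candle) is the inverse of Candle.from_str (not shown in this snippet):

    def cache_candle(self, symbol: str, candle: Candle) -> None:
        # Append the streamed candle to the per-symbol list drained by get_cached_candles
        self.client.rpush(self.get_prefix() + 'STREAM-CANDLES_' + symbol, str(candle))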
Example #7
 def create_dummy_day(self, symbol: str, day_date: date,
                      num_candles: int) -> SymbolDay:
     """Creates a SymbolDay with mock price data."""
     dummy_candles = []
     dummy_moment = datetime.combine(day_date, OPEN_TIME)
     for i in range(num_candles):
         dummy_candles.append(
             Candle(
                 moment=dummy_moment,
                 open=0.001,
                 high=0.001,
                 low=0.001,
                 close=0.001,
                 volume=999,
             ))
         dummy_moment += timedelta(seconds=1)
     return SymbolDay(symbol, day_date, dummy_candles)
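
A hypothetical test usage; the symbol, date, and candle count are arbitrary:

day = self.create_dummy_day('TEST', date(2021, 3, 5), num_candles=600)
assert len(day.candles) == 600
# Candles are spaced one second apart, starting at OPEN_TIME
assert day.candles[-1].moment - day.candles[0].moment == timedelta(seconds=599)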
Example #8
    def _parse_ticks_in_intervals(
            self, symbol: str,
            intervals: List[List[datetime]]) -> List[Candle]:
        """
        :param intervals: e.g. [[start_1, end_1], [start_2, end_2]]; lower limits inclusive; upper limits exclusive
        """

        # Set all timezones to EST
        for i in range(len(intervals)):
            intervals[i][0] = timezone('America/New_York').localize(
                intervals[i][0])
            intervals[i][1] = timezone('America/New_York').localize(
                intervals[i][1])

        # Parse 50k-count batches of ticks (individual trades) into second-resolution candles
        interval_index = 0
        moment = intervals[interval_index][0].replace(microsecond=0)
        basket_start_moment = moment
        ticks_in_basket = []
        candles = []
        alpaca_client = ata.REST()
        batches_aggregated = 0
        ns_offset = int(moment.timestamp()) * 1000000000
        while True:

            # Fetch next batch of up to 50k trades, starting at moment

            try:

                # Ensure api cooldown is not too long
                if self.next_api_call - self.time_env.now() > timedelta(
                        seconds=self.MAX_RATE_LIMIT_WAIT):
                    self.next_api_call = self.time_env.now() + timedelta(
                        seconds=0.1)
                while self.time_env.now() < self.next_api_call:
                    pytime.sleep(0.5)

                # Request a batch of ticks (trades) from polygon's REST API
                batch_response = alpaca_client.polygon.get(
                    path=f'/ticks/stocks/trades/{symbol}/'
                    f'{moment.date().strftime(POLYGON_DATE_FORMAT)}',
                    params={
                        'timestamp': ns_offset,
                        'limit': 50000
                    },
                    version='v2')
                ticks_in_basket.extend(batch_response['results'])
                batches_aggregated += 1
                ns_offset = ticks_in_basket[-1]['t']

            except Exception:

                # On error response, double our wait time before the next query
                self.rate_limit_wait = self.INITIAL_RATE_LIMIT_WAIT if self.rate_limit_wait == 0 \
                    else self.rate_limit_wait * 2

                # Re-attempt to fetch after applying the wait time
                if self.rate_limit_wait <= self.MAX_RATE_LIMIT_WAIT:
                    self.next_api_call = self.time_env.now() + timedelta(
                        seconds=self.rate_limit_wait)
                    # Re-attempt to collect this data
                    continue

            # On too many error responses and empty basket, end task
            if self.rate_limit_wait > self.MAX_RATE_LIMIT_WAIT and len(
                    ticks_in_basket) == 0:
                self.warn_process(
                    f'Couldn\'t collect {symbol} candles from polygon starting on {moment:%d-%m-%Y} '
                    f'at {moment:%H:%M:%S}! Stopping collection early')
                return candles
            # On too many error responses and nonempty basket, finish parsing basket and then end task
            elif self.rate_limit_wait > self.MAX_RATE_LIMIT_WAIT:
                pass
            # On out of market-hours batch, finish parsing basket and then end task
            elif moment > timezone('America/New_York').localize(
                    datetime.combine(moment.date(), CLOSE_TIME)):
                self.rate_limit_wait = self.MAX_RATE_LIMIT_WAIT
            # On few or no error responses, keep fetching until basket fills up
            elif batches_aggregated <= self.MAX_SIMULTANEOUS_BATCHES \
                    and self.rate_limit_wait <= self.MAX_RATE_LIMIT_WAIT \
                    and moment <= timezone('America/New_York').localize(
                        datetime.combine(moment.date(), CLOSE_TIME) + timedelta(minutes=45)):
                continue
            # On empty basket and termination condition, return what we have
            elif len(ticks_in_basket) == 0:
                break

            # Aggregate ticks from the API response into second-resolution Candle objects
            prices_in_moment = []
            volume_in_moment = 0
            candles_before_basket = len(candles)
            moment = basket_start_moment
            for tick_data in ticks_in_basket:
                tick_moment = pd.Timestamp(tick_data['t'],
                                           tz='America/New_York',
                                           unit='ns').to_pydatetime()
                if tick_moment < moment:
                    # Skip over trades made before the truncated second
                    continue
                if tick_moment < moment + timedelta(seconds=1):
                    # Count this trade toward the candle during which it was made
                    prices_in_moment.append(tick_data['p'])
                    volume_in_moment += tick_data['s']
                else:
                    # At the end of the second (i.e. x.999),
                    # use the accumulated trades to create a Candle object
                    if len(prices_in_moment) > 0 and volume_in_moment > 0:
                        candle = Candle(moment=moment.replace(tzinfo=None),
                                        open=prices_in_moment[0],
                                        high=max(prices_in_moment),
                                        low=min(prices_in_moment),
                                        close=prices_in_moment[-1],
                                        volume=volume_in_moment)
                        # Store the candle
                        if OPEN_TIME <= candle.moment.time() < CLOSE_TIME:
                            candles.append(candle)
                    # Move on to the next truncated second, starting with this trade
                    prices_in_moment = [tick_data['p']]
                    volume_in_moment = tick_data['s']
                    moment = tick_moment.replace(microsecond=0)

            # Move to the next second if fetching this basket ended in errors or an empty basket
            if self.rate_limit_wait > self.MAX_RATE_LIMIT_WAIT or len(
                    candles) == candles_before_basket:
                basket_start_plus_1 = basket_start_moment + timedelta(
                    seconds=1)
                last_parsed_plus_1 = moment + timedelta(seconds=1)
                moment = basket_start_plus_1 if basket_start_plus_1 > last_parsed_plus_1 else last_parsed_plus_1

            # When the tick offset passes the current interval's end, jump to the next interval or end the task
            if ns_offset >= intervals[interval_index][1].timestamp() * 1000000000:

                # Return after collecting candles for the final time interval
                if interval_index == len(intervals) - 1:
                    break

                # Move on to the next time interval
                else:
                    interval_index += 1
                    moment = intervals[interval_index][0]

            # Reset basket data so the next batches can be fetched
            batches_aggregated = 0
            self.rate_limit_wait = 0
            basket_start_moment = moment
            ns_offset = int(moment.timestamp()) * 1000000000
            ticks_in_basket = []

        return candles
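
The heart of the loop above is the tick-to-candle aggregation; stripped of rate limiting and interval bookkeeping, that step alone looks roughly like this (ticks carry 't' as a nanosecond timestamp, 'p' as price, and 's' as size, per the Polygon response used above; timezone handling is elided and ticks are assumed sorted ascending):

from datetime import datetime
from typing import Dict, List

def ticks_to_second_candles(ticks: List[Dict]) -> List['Candle']:
    # Build one OHLCV candle per truncated second, skipping empty seconds
    candles, prices, volume, moment = [], [], 0, None
    for tick in ticks:
        second = datetime.fromtimestamp(tick['t'] / 1_000_000_000).replace(microsecond=0)
        if moment is not None and second > moment:
            # The second just ended; flush its accumulated trades into a candle
            candles.append(Candle(moment, prices[0], max(prices),
                                  min(prices), prices[-1], volume))
            prices, volume = [], 0
        if moment is None or second > moment:
            moment = second
        prices.append(tick['p'])
        volume += tick['s']
    if prices:
        candles.append(Candle(moment, prices[0], max(prices),
                              min(prices), prices[-1], volume))
    return candles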
Example #9
    def run(self) -> HealthCheckResult:
        symbol = 'ALXN'
        dates = self.mongo().get_dates_on_file(symbol, START_DATE,
                                               self.time().now().date())

        self.debug('max dip45 result for a day to be viable (from CycleStrategy settings): {0}'.format(self.MAX_DIP45))
        sample_size = min(120, len(dates) - 1)
        end_index = len(dates) - 1
        start_index = end_index - sample_size
        rolling_sum = 0
        days_passing = 0
        self.debug('analyzing {0} days'.format(sample_size))
        for date_index in range(start_index, end_index):
            # Pause or else gunicorn worker threads will be starved out
            pytime.sleep(0.01)

            # Get data for the day
            day_date = dates[date_index]
            day_data = self.mongo().load_symbol_day(symbol, day_date)

            # Skip if the day has no data
            if len(day_data.candles) < MIN_CANDLES_PER_DAY:
                self.debug('skipping {0}'.format(day_data.day_date))
                continue

            # Find price at minute 60
            start_candle: Candle = day_data.get_candle_at_sec(
                datetime.combine(day_data.day_date, time(hour=10, minute=30)))
            if start_candle is None:
                self.debug(
                    "couldn't calculate dip_45 on {0}. Bad data at minute 60".
                    format(day_date))
                continue

            # Find lowest price within 45 minutes after minute 60
            start_time = datetime.combine(day_data.day_date,
                                          time(hour=10, minute=30))
            end_time = datetime.combine(
                day_data.day_date, time(hour=10,
                                        minute=30)) + timedelta(minutes=45)
            lowest_candle: Candle = Candle(start_candle.moment,
                                           start_candle.open,
                                           start_candle.high, start_candle.low,
                                           start_candle.close,
                                           start_candle.volume)
            for candle in day_data.candles:
                if candle.low < lowest_candle.low and start_time < candle.moment < end_time:
                    lowest_candle = candle

            # Calculate the greatest downward price change as a percentage
            strongest_dip = 100.0 * max(
                0.0, start_candle.low - lowest_candle.low) / start_candle.low
            if strongest_dip > self.MAX_DIP45:
                days_passing += 1

            self.debug('{0} dip on {1}  ({2}viable)'.format(
                "%.2f" % strongest_dip, day_data.day_date,
                'not ' if strongest_dip > self.MAX_DIP45 else ''))

            # Calculate the new rolling sum for the analysis model
            rolling_sum = rolling_sum if strongest_dip == 0 \
                else RollingSumFormulas.combine(rolling_sum, strongest_dip, RollingSumFormulas.get_30_day_weight())

            # Print the new rolling sum
            self.debug('rolling sum is now {0}'.format("%.2f" % rolling_sum))

        # Pass the health check if its conditions are met
        self.set_passing(True)
        if days_passing < (1 / 5) * sample_size:
            self.set_passing(False)
        if not 1 <= rolling_sum <= 5:
            self.set_passing(False)

        return self.make_result()
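
Concretely, with the full sample_size of 120 days this check passes only when at least 24 days (one fifth) dip by more than MAX_DIP45 and the final rolling sum lands in [1, 5].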