def get_candles_within(self, interval: int = INTERVAL_DAY, start: datetime = datetime.min, finish: datetime = datetime.max, exclude_start: bool = False, exclude_finish: bool = False, exclude_filler: bool = False) -> 'QuerySet[Candle]':
    """Fetches this asset's candles for the given interval, restricted to a timeframe.

    Keyword Arguments:
        interval {int} -- The number of seconds each Candle represents. (default: {INTERVAL_DAY})
        start {datetime} -- The starting datetime of the interval. (default: {datetime.datetime.min})
        finish {datetime} -- The finishing datetime of the interval. (default: {datetime.datetime.max})
        exclude_start {bool} -- Whether to exclude the start datetime from the interval. (default: {False})
        exclude_finish {bool} -- Whether to exclude the finish datetime from the interval. (default: {False})
        exclude_filler {bool} -- Whether to exclude filler candles from the results. (default: {False})

    Returns:
        QuerySet[Candle] -- An iterable QuerySet of the matching Candles.
    """
    # Bundle the timeframe bounds together, then delegate to the Candle model.
    window = dict(start=start, finish=finish,
                  exclude_start=exclude_start, exclude_finish=exclude_finish)
    return Candle.get_asset_within(self, interval=interval,
                                   exclude_filler=exclude_filler, **window)
def do_analysis(self): """Метод подготовки прогнозов""" # Получаем свечи разной длинны # candles = Candle.get_last(self.quotation.ts, self.task.setting.analyzer_deep, # self.task.setting.instrument_id, "parent") candles = Candle.get_last_with_nesting( self.quotation.ts, self.task.setting.analyzer_deep, self.task.setting.instrument_id, self.task.setting.candles_durations, "parent") # Получаем разные вариации последовательностей c глубиной вхождения sequences = Sequence.get_sequences_json(self.task, candles) sequences_models = [] for sequence in sequences: if len(sequence) >= self.task.setting.analyzer_min_deep: sequences_models.append(Sequence.make(sequence)) if len(sequences_models) > 0: patterns_models = [] predictions_models = [] for time_bid in self.task.setting.analyzer_bid_times: for seq_raw in sequences_models: prediction = Prediction.make(self.task, time_bid, self.quotation) # Проверка оставшегося времени до ставки if prediction.time_to_expiration >= ( time_bid['time'] - time_bid['admission']): pattern = Pattern.make(self.task, seq_raw, time_bid, self.quotation) predictions_models.append(prediction) patterns_models.append(pattern) if len(patterns_models) > 0: patterns = Pattern.save_many(patterns_models) i = 0 for pat_rec in patterns: predictions_models[i].pattern_id = pat_rec.id if Controller.check_on_make_prediction(self.task, pat_rec): self.task.storage.predictions.append( predictions_models[i]) if Controller.check_on_make_signal(self.task, pat_rec, predictions_models[i], self.quotation): # Проверка условий вероятности при создании сигнала direction = Signaler.check(self.task, pat_rec) if direction: Signaler.make_and_save(self.task, direction, pat_rec, predictions_models[i]) if self.task.get_param("history_num", 0) > 0: signals_count = self.task.get_status( "checker_signals_count", 0) self.task.update_status( "checker_signals_count", signals_count + 1) i += 1
def get_first_candle(self, interval: int = INTERVAL_DAY) -> 'Candle':
    """Looks up the earliest candle recorded for this asset at the given interval.

    Keyword Arguments:
        interval {int} -- The number of seconds each Candle represents. (default: {INTERVAL_DAY})

    Returns:
        Candle -- The earliest matching Candle, or None when no candle exists.
    """
    earliest = Candle.get_asset_first_candle(self, interval)
    return earliest
def get_candles(self, interval: int = INTERVAL_DAY) -> 'QuerySet[Candle]':
    """Fetches every candle stored for this asset at the given interval.

    Keyword Arguments:
        interval {int} -- The number of seconds each Candle represents. (default: {INTERVAL_DAY})

    Returns:
        QuerySet[Candle] -- An iterable QuerySet of the matching Candles.
    """
    matching = Candle.get_asset(self, interval)
    return matching
def get_last_candle(self, interval: int = INTERVAL_DAY, market_open: bool = False) -> 'Candle':
    """Looks up the most recent candle for this asset at the given interval.

    Keyword Arguments:
        interval {int} -- The number of seconds each Candle represents. (default: {INTERVAL_DAY})
        market_open {bool} -- When True, only consider candles where the market
            was open - i.e. skip filler candles. (default: {False})

    Returns:
        Candle -- The latest matching Candle, or None when no candle exists.
    """
    latest = Candle.get_asset_last_candle(self, interval, market_open)
    return latest
def get_daily_candle(self, date: datetime) -> 'Candle':
    """Fetches the model.constants.INTERVAL_DAY candle covering the given date.

    Arguments:
        date {datetime/date} -- The datetime (date extracted) or date for which
            the Candle is required.

    Returns:
        Candle -- The daily Candle object matching the query.
    """
    # Accept either a datetime (reduced to its date) or a plain date.
    day = date.date() if isinstance(date, datetime) else date
    # Query the half-open window [day, day + 1): finish is exclusive.
    window = Candle.get_asset_within(self, INTERVAL_DAY, day,
                                     day + timedelta(days=1), False, True)
    return window.first()
def aggregate_candles(self, asset: 'Asset', interval: int, candles: 'QuerySet[Candle]') -> 'Candle':
    """Combines multiple candles into a single candle of the given interval.

    Arguments:
        asset {Asset} -- The Asset the resulting Candle is for.
        interval {int} -- The time interval (in seconds) of the Candle.
        candles {QuerySet[Candle]} -- An iterable QuerySet of the Candles to be combined.

    Returns:
        Candle -- The resulting aggregate Candle for the given data.
    """
    lowest = None
    highest = None
    total_volume = 0
    first_open = 0
    first_stamp = None
    last_close = 0
    last_stamp = None
    for item in candles:
        # Candles without an open price (fillers) contribute nothing.
        if item.get_open() is None:
            continue
        low_val = item.get_low()
        high_val = item.get_high()
        # Track the overall extremes seen so far.
        lowest = low_val if lowest is None or low_val < lowest else lowest
        highest = high_val if highest is None or high_val > highest else highest
        vol = item.get_volume()
        if vol is not None:
            total_volume += vol
        stamp = item.get_open_time()
        # The aggregate opens with the earliest candle...
        if first_stamp is None or stamp < first_stamp:
            first_open = item.get_open()
            first_stamp = stamp
        # ...and closes with the latest one.
        if last_stamp is None or stamp > last_stamp:
            last_close = item.get_close()
            last_stamp = stamp
    return Candle(asset=asset,
                  low=lowest,
                  high=highest,
                  open=first_open,
                  close=last_close,
                  # A zero total means no volume data was present at all.
                  volume=total_volume if total_volume != 0 else None,
                  open_time=first_stamp,
                  interval=interval)
def save_candles(self):
    """Persists candles for the task's instrument at the configured durations via PostgreSQL."""
    Candle.save_through_pg(self.quotation.ts,
                           self.task.setting.candles_durations,
                           self.task.setting.instrument_id)
def sync_asset_weekly(self, asset: 'Asset') -> List:
    """Calculates the weekly candles based on an aggregation of daily candles
    for the given asset.

    Arguments:
        asset {Asset} -- The Asset to perform the calculations on.

    Returns:
        List -- Returns in a List whether the update was successful (bool)
            and the number of candles inserted (int).
    """
    CONFIG.DATA_LOGGER.info(
        "AssetUpdaterAggregation -> sync_asset_weekly(%s) -> start",
        asset.get_name())
    last_candle = asset.get_last_candle(interval=INTERVAL_WEEK)
    candles = []
    if last_candle is None:
        # No weekly candles exist yet: anchor the first week on the first
        # Monday for which a daily candle is available.
        first_daily_candle = asset.get_first_candle()
        if first_daily_candle is None:
            CONFIG.DATA_LOGGER.error(
                "AssetUpdaterAggregation -> sync_asset_weekly() -> 1")
            CONFIG.DATA_LOGGER.error(str(asset.as_dict()))
            return [False, 0]
        weekday = first_daily_candle.get_open_time().weekday()
        if weekday != 0:
            # Not a Monday - jump forward to the following Monday's daily candle.
            first_daily_candle = asset.get_daily_candle(
                first_daily_candle.get_open_time() +
                timedelta(days=(7 - weekday)))
            if first_daily_candle is None:
                CONFIG.DATA_LOGGER.error(
                    "AssetUpdaterAggregation -> sync_asset_weekly() -> 2")
                CONFIG.DATA_LOGGER.error(str(asset.as_dict()))
                return [False, 0]
        week_start = first_daily_candle.get_open_time().date()
        week_end = week_start + timedelta(days=7)
    else:
        # FIX: the previous implementation re-queried
        # asset.get_last_candle(interval=INTERVAL_WEEK) here even though
        # last_candle already holds exactly that result, and then guarded a
        # None case that is unreachable in this branch. Reuse last_candle.
        weekday = last_candle.get_open_time().weekday()
        if weekday != 0:
            # Align the next week onto the following Monday.
            week_start = last_candle.get_open_time().date(
            ) + timedelta(days=(7 - weekday))
        else:
            week_start = last_candle.get_open_time().date(
            ) + timedelta(days=7)
        week_end = week_start + timedelta(days=7)
    # Aggregate one week at a time; only fully elapsed weeks are built.
    while week_end < datetime.utcnow().date():
        week_of_candles = Candle.get_asset_within(asset=asset,
                                                  interval=INTERVAL_DAY,
                                                  start=week_start,
                                                  finish=week_end,
                                                  exclude_finish=True)
        candles.append(
            self.aggregate_candles(asset, INTERVAL_WEEK, week_of_candles))
        week_start = week_start + timedelta(days=7)
        week_end = week_start + timedelta(days=7)
    if candles:
        Candle.objects.insert(candles)
    CONFIG.DATA_LOGGER.info(
        "AssetUpdaterAggregation -> sync_asset_weekly(%s) -> finish",
        asset.get_name())
    return [True, len(candles)]
def sync_asset(self, asset: 'Asset') -> List:
    """Updates the daily stock data for the specified asset.

    Arguments:
        asset {Asset} -- The Asset to be updated.

    Returns:
        List -- Returns a list reflecting whether the update was successful
            (bool) and the number of entries updated (int).
    """
    CONFIG.DATA_LOGGER.info("StockUpdaterDaily -> sync_asset(%s) -> start",
                            asset.get_name())
    # Get the last updated candle
    latest_candle = asset.get_last_candle()
    # By default we won't update
    sync_type = None
    if latest_candle is not None:
        curr_time = datetime.utcnow()
        # Calculate how many days ago the last full synced candle was
        diff = (curr_time - latest_candle.get_open_time()
                ).total_seconds() / INTERVAL_DAY
        CONFIG.DATA_LOGGER.debug(
            "StockUpdaterDaily -> sync_asset(%s) -> diff is %s",
            asset.get_name(), str(diff))
        # Only update if it is greater than the update interval (2 days - as it is open)
        if diff > DAILY_UPDATE_INTERVAL:
            # Compact sync will only return last 100 candles, so to reduce network usage
            # only sync what we need to
            if diff >= DAILY_COMPACT_THRESHOLD:
                sync_type = DAILY_SYNC_FULL
            else:
                sync_type = DAILY_SYNC_COMPACT
    else:
        # If we have no Candles in the dataset, we need to do a full sync
        sync_type = DAILY_SYNC_FULL
    # Return success in syncing 0 results
    if sync_type is None:
        CONFIG.DATA_LOGGER.info(
            "StockUpdaterDaily -> sync_asset(%s) -> finish(nosync)",
            asset.get_name())
        return [True, 0]
    counter = 0
    # Ensure we haven't attempted to sync too many times with failing
    while counter < DAILY_MAX_RETRIES:
        try:
            # Notify the provider we intend on making the request - ensure we are under quotas
            self.provider.make_request()
            # Make the request with the given ticker
            data = self.api.get_daily(asset.get_ticker(),
                                      outputsize=sync_type)
            # Success: force the retry loop to terminate
            counter = DAILY_MAX_RETRIES
        except Exception as ex:
            # Log the details of the error if we fail
            CONFIG.DATA_LOGGER.error(
                "Failed to update daily data for %s -> Attempt %s",
                asset.get_name(), str(counter))
            #CONFIG.DATA_LOGGER.error("StockUpdaterDaily -> sync_asset() -> 1")
            #CONFIG.DATA_LOGGER.error(str(asset.as_dict()))
            #CONFIG.DATA_LOGGER.exception(str(ex))
            # Wait for specified time by configuration
            sleep(CONFIG.ERROR_WAIT_TIME)
            # Increment the number of failures
            counter += 1
            # Out of retries: abort the whole process
            if counter == DAILY_MAX_RETRIES:
                CONFIG.DATA_LOGGER.error(
                    "Failed to update daily data for %s -> Attempt %s (Terminated)",
                    asset.get_name(), str(counter))
                os._exit(1)
    # If no data was returned, notify that we failed to sync the data
    if data is None:
        return [False, 0]
    candles = []
    # Loop through all the days in the response
    for date in data[0]:
        # Extract the data for a particular day in the JSON
        candle_data = data[0][date]
        if candle_data is None:
            # Log the details of the error if we get a None response here
            CONFIG.DATA_LOGGER.error(
                "StockUpdaterDaily -> sync_asset() -> 2")
            CONFIG.DATA_LOGGER.error(str(asset.as_dict()))
            CONFIG.DATA_LOGGER.error(date)
            # Terminate syncing this asset
            return [False, 0]
        try:
            # Provider timestamps are US/Eastern; normalise to naive UTC
            # before storing.
            datestamp = parser.parse(date)
            datestamp = datestamp.replace(tzinfo=tz.gettz("US/Eastern"))
            datestamp = datestamp.astimezone(UTC).replace(tzinfo=None)
        except Exception as ex:
            CONFIG.DATA_LOGGER.error(
                "StockUpdaterDaily -> sync_asset() -> 3")
            CONFIG.DATA_LOGGER.error(str(asset.as_dict()))
            CONFIG.DATA_LOGGER.error(date)
            return [False, 0]
        if datestamp is None:
            # Log the details of the error if we fail to parse the date
            CONFIG.DATA_LOGGER.error(
                "StockUpdaterDaily -> sync_asset() -> 4")
            CONFIG.DATA_LOGGER.error(str(asset.as_dict()))
            CONFIG.DATA_LOGGER.error(date)
            # Terminate syncing this asset
            return [False, 0]
        # Don't sync daily data for the current date (candle incomplete)
        if datestamp.date() == datetime.utcnow().date():
            continue
        # If we find a candle stamped prior to our most recently updated one,
        # then we know we can terminate parsing the response here
        if latest_candle is not None and datestamp.date(
        ) <= latest_candle.get_open_time().date():
            break
        try:
            # Try and parse all the elements to create the candle object
            candle = Candle(asset=asset,
                            open=float(candle_data['1. open']),
                            high=float(candle_data['2. high']),
                            low=float(candle_data['3. low']),
                            close=float(candle_data['4. close']),
                            volume=float(candle_data['5. volume']),
                            open_time=datestamp,
                            interval=INTERVAL_DAY)
        except Exception as ex:
            # If we fail, log all the details of the error
            CONFIG.DATA_LOGGER.error(
                "StockUpdaterDaily -> sync_asset() -> 5")
            CONFIG.DATA_LOGGER.error(str(asset.as_dict()))
            CONFIG.DATA_LOGGER.exception(str(ex))
            # Terminate syncing this asset
            return [False, 0]
        # If we have already parsed a candle already from the response
        if candles:
            # Then grab the date of the candle
            target_day = candles[-1].get_open_time().date()
        else:
            # Otherwise grab the current date
            target_day = datetime.utcnow().date()
        # Calculate the potential 'filler' candle - i.e. if we have a weekend or closed trading day
        filler_candle_stamp = candle.get_open_time() + timedelta(days=1)
        # Loop over the days between the candle date and the target date
        while filler_candle_stamp.date() != target_day:
            # Add the filler candles with closes set to the earliest candle
            candles.append(
                Candle(asset=asset,
                       close=candle.get_close(),
                       open_time=filler_candle_stamp,
                       interval=INTERVAL_DAY))
            filler_candle_stamp = filler_candle_stamp + timedelta(days=1)
        # Finally append the original candle (to maintain the insertion order)
        candles.append(candle)
    if candles:
        # if we have candles to insert, then insert them all now
        Candle.objects.insert(candles)
        asset.update_earliest_timestamp()
        asset.save()
    else:
        # No new real candles: pad with filler candles from the latest
        # stored candle up to (but excluding) today.
        target_day = datetime.utcnow().date()
        filler_candle_stamp = latest_candle.get_open_time() + timedelta(
            days=1)
        while filler_candle_stamp.date() != target_day:
            candles.append(
                Candle(asset=asset,
                       close=latest_candle.get_close(),
                       open_time=filler_candle_stamp,
                       interval=INTERVAL_DAY))
            filler_candle_stamp = filler_candle_stamp + timedelta(days=1)
        if candles:
            Candle.objects.insert(candles)
    CONFIG.DATA_LOGGER.info(
        "StockUpdaterDaily -> sync_asset(%s) -> finish(sync)",
        asset.get_name())
    return [True, len(candles)]