class PriceHistoryTestCase(TestCase):

    def setUp(self):
        self.price_history = PriceVolumeHistoryStorage(
            ticker=ticker1,
            exchange=exchange,
            timestamp=timestamp,
            index=index,
            value=value
        )

    def test_must_use_standard_index(self):
        # a non-standard index name must be rejected on save
        self.price_history.index = "some_other_price"
        self.assertRaises(
            PriceVolumeHistoryException,
            self.price_history.save
        )

    def test_old_timestamp_raises_exception(self):
        def instantiate_old_price_history():
            old_price_history = PriceVolumeHistoryStorage(
                ticker=ticker1,
                exchange=exchange,
                timestamp=JAN_1_2017_TIMESTAMP - 1,  # too old
                index=index,
                value=value
            )

        self.assertRaises(
            TimeseriesException,
            instantiate_old_price_history
        )

    def test_duplicate_returns_0(self):
        self.price_history.index = "close_price"
        self.price_history.value = 12345
        self.price_history.unix_timestamp = JAN_1_2017_TIMESTAMP + 298343

        # first save returns 1: one entry added
        self.assertEqual(self.price_history.save(), 1)
        # second save returns 0: duplicate, no entry added
        self.assertEqual(self.price_history.save(), 0)

    def test_query_tolerance(self):
        query_results = PriceVolumeHistoryStorage.query(
            ticker=ticker1, exchange=exchange, index=index,
            timestamp=timestamp,
            periods_range=1, timestamp_tolerance=29
        )
        self.assertLess(len(query_results['values']), 7)

    def tearDown(self):
        from settings.redis_db import database
        database.delete(self.price_history.get_db_key())
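The fixtures ticker1, exchange, timestamp, index, value, and JAN_1_2017_TIMESTAMP are referenced but never defined in this listing. A minimal sketch of what the module-level test fixtures might look like; the concrete values below are assumptions, not taken from the source:

# Hypothetical test fixtures assumed by PriceHistoryTestCase;
# the real definitions are not shown in the source listing.
import time

JAN_1_2017_TIMESTAMP = 1483228800  # 2017-01-01T00:00:00Z

ticker1 = "ETH_BTC"
exchange = "binance"
timestamp = int(time.time())  # any recent, valid unix timestamp
index = "close_price"         # one of the standard price indexes
value = 12345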
def get(self, request, ticker):
    ticker = ticker or request.query_params.get('ticker')
    exchange = request.query_params.get('exchange')
    timestamp = request.query_params.get('timestamp')
    index = request.query_params.get('index')

    results_dict = PriceVolumeHistoryStorage.query(
        ticker=ticker, exchange=exchange, index=index,
        timestamp=timestamp,
        periods_range=12, timestamp_tolerance=29
    )

    if len(results_dict) and 'error' not in results_dict:
        return Response(results_dict, status=status.HTTP_200_OK)
    else:
        return Response(results_dict, status=status.HTTP_404_NOT_FOUND)
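For reference, a client call against this view might look like the following. The URL path is hypothetical, since the routing is not shown in the source:

# Hypothetical client usage; the URL path is an assumption (routing not shown).
import requests

response = requests.get(
    "https://example.com/api/ta/price-history/ETH_BTC",
    params={"exchange": "binance", "index": "close_price", "timestamp": 1514764800},
)
print(response.status_code)  # 200 with results, 404 if nothing was found
print(response.json())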
def redisCleanup():
    try:
        do_not_disturb = bool(int(database.get("working on old stuff").decode("utf-8")))
    except (AttributeError, TypeError, ValueError):  # flag missing or unreadable
        database.set("working on old stuff", 0)
        do_not_disturb = False

    if do_not_disturb:
        return

    logger.info("I'M CLEANING REDIS !!!")
    now_timestamp = int(time.time())

    # PriceVolumeHistoryStorage
    # delete all values 2 hours old or older
    old_for_pv_history_timestamp = now_timestamp - (3600 * 2)  # 2 hours
    from apps.TA.storages.data.pv_history import PriceVolumeHistoryStorage
    old_score = PriceVolumeHistoryStorage.score_from_timestamp(old_for_pv_history_timestamp)
    pv_history_keys = database.keys('*:PriceVolumeHistoryStorage:*')
    for pv_history_key in pv_history_keys:
        try:
            database.zremrangebyscore(pv_history_key, 0, old_score)
        except Exception as e:
            logger.error(str(e))

    # PriceVolumeHistoryStorage
    # remove anything without a valid score
    # (valid means between Jan 1, 2017 and tomorrow, using the timeseries score)
    from datetime import datetime, timedelta
    from apps.TA.storages.abstract.timeseries_storage import TimeseriesStorage
    old_score = TimeseriesStorage.score_from_timestamp(datetime(2017, 1, 1).timestamp())
    highest_allowed_score = TimeseriesStorage.score_from_timestamp(
        (datetime.today() + timedelta(days=1)).timestamp()
    )
    for price_history_key in database.keys("*PriceVolumeHistoryStorage*"):
        database.zremrangebyscore(price_history_key, 0, old_score)
        database.zremrangebyscore(price_history_key, highest_allowed_score,
                                  datetime.today().timestamp())

    # PriceStorage
    # delete all values 200 days old or older
    if STAGE:
        old_for_price_timestamp = now_timestamp - (5 * SMA_LIST[-1])  # 200 periods on short
    else:
        old_for_price_timestamp = now_timestamp - (3600 * 24 * SMA_LIST[-1])  # 200 days
    from apps.TA.storages.data.price import PriceStorage
    old_score = PriceStorage.score_from_timestamp(old_for_price_timestamp)
    price_history_keys = database.keys('*:PriceStorage:*')
    for price_history_key in price_history_keys:
        try:
            database.zremrangebyscore(price_history_key, 0, old_score)
        except Exception as e:
            logger.error(str(e))

    if STAGE:
        # remove all poloniex and bittrex data for now
        # todo: remove this and make sure it's not necessary
        for key in database.keys("*:poloniex:*"):
            database.delete(key)
        for key in database.keys("*:bittrex:*"):
            database.delete(key)
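redisCleanup is safe to call repeatedly: it exits early when the "working on old stuff" flag is set. A minimal sketch of running it on a schedule, assuming a plain long-running process; the project's real task runner and interval are not shown in the source:

# A minimal scheduling sketch; a plain loop is assumed here because the
# project's actual task runner is not shown in the source.
import time

CLEANUP_INTERVAL_SECONDS = 3600  # hypothetical: once per hour

def run_cleanup_forever():
    while True:
        try:
            redisCleanup()
        except Exception as e:
            logger.error(f"redisCleanup failed: {e}")
        time.sleep(CLEANUP_INTERVAL_SECONDS)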
def save_pv_histories_to_redis(ph_object, pipeline=None):
    # only ingest Binance data quoted in BTC or USDT
    if ph_object.source != BINANCE or ph_object.counter_currency not in [BTC, USDT]:
        return pipeline if pipeline is not None else [0]

    # an empty redis pipeline is falsy (its length is its queued command count),
    # so test against None rather than truthiness
    using_local_pipeline = (pipeline is None)
    if using_local_pipeline:
        pipeline = database.pipeline()  # transaction=False

    ticker = f'{ph_object.transaction_currency}_{ph_object.get_counter_currency_display()}'
    exchange = str(ph_object.get_source_display())
    unix_timestamp = int(ph_object.timestamp.timestamp())

    # SAVE VALUES IN REDIS USING PriceVolumeHistoryStorage OBJECT
    # CREATE OBJECT FOR STORAGE
    pv_storage = PriceVolumeHistoryStorage(
        ticker=ticker, exchange=exchange, timestamp=unix_timestamp
    )

    publish_close_price = timestamp_is_near_5min(unix_timestamp)

    if ph_object.volume and ph_object.volume > 0:
        pv_storage.index = "close_volume"
        pv_storage.value = ph_object.volume
        pipeline = pv_storage.save(publish=publish_close_price, pipeline=pipeline)

    if ph_object.open_p and ph_object.open_p > 0:
        pv_storage.index = "open_price"
        pv_storage.value = ph_object.open_p
        pipeline = pv_storage.save(publish=False, pipeline=pipeline)

    if ph_object.high and ph_object.high > 0:
        pv_storage.index = "high_price"
        pv_storage.value = ph_object.high
        pipeline = pv_storage.save(publish=False, pipeline=pipeline)

    if ph_object.low and ph_object.low > 0:
        pv_storage.index = "low_price"
        pv_storage.value = ph_object.low
        pipeline = pv_storage.save(publish=False, pipeline=pipeline)

    # always run the 'close_price' index last.
    # why? when it saves, it triggers PriceStorage to resample;
    # after resampling, the history indexes are deleted,
    # so all the others must be available for resampling before they are deleted
    if ph_object.close and ph_object.close > 0:
        pv_storage.index = "close_price"
        pv_storage.value = ph_object.close
        pipeline = pv_storage.save(publish=True, pipeline=pipeline)

    if using_local_pipeline:
        return pipeline.execute()
    else:
        return pipeline

### END PULL OF PRICE HISTORY RECORDS ###
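Because save_pv_histories_to_redis returns the pipeline when one is passed in, and only executes when it created the pipeline itself, callers can batch many history objects into a single round trip. A sketch under that contract; ph_objects stands in for a queryset or list of price-history model instances, which the source does not show being fetched:

# Hypothetical batch usage built on the pipeline contract above;
# ph_objects is an assumed iterable of price-history model instances.
def save_many_pv_histories(ph_objects):
    pipeline = database.pipeline()  # one shared pipeline for the whole batch
    for ph_object in ph_objects:
        pipeline = save_pv_histories_to_redis(ph_object, pipeline=pipeline)
    return pipeline.execute()  # flush all queued saves in one round trip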
def put(self, request, ticker, format=None):
    """
    Receives a resampled price for the upcoming or recently past 5min period,
    where the timestamp is divisible by 300s (5 min) and represents
    a resampled data point.
    :return: Response summarizing how many db entries were created
    """
    ticker = ticker or request.data.get('ticker')
    exchange = request.data.get('exchange')
    timestamp = request.data.get('timestamp')

    # SAVE VALUES IN REDIS USING PriceVolumeHistoryStorage OBJECTS
    pipeline = database.pipeline()  # transaction=False

    # one storage object per index, kept separate in memory
    # (saving is pipelined, so mutating a single shared object would alias entries)
    data_history_objects = {}

    try:
        if "_ETH" in ticker:
            return Response(
                {'success': 'not assessing ETH base tickers, 0 db entries created'},
                status=status.HTTP_202_ACCEPTED
            )

        if "_BTC" in ticker:
            close_volume = int(float(request.data["close_volume"]))
            close_price = int(float(request.data["close_price"]))
            if close_volume and close_price and (close_volume * close_price) < 50:
                # less than 50 BTC volume
                return Response(
                    {'success': f'volume {close_volume} x {close_price} is low, '
                                f'0 db entries created'},
                    status=status.HTTP_202_ACCEPTED
                )

        for index in default_price_indexes + default_volume_indexes:
            if not request.data.get(index):
                continue

            index_value = int(float(request.data[index]))
            if index in default_price_indexes:
                index_value = index_value * (10 ** 8)

            if index_value > 0:
                # construct a fresh object per index so each one stays
                # separate in memory while its save sits in the pipeline
                data_history_objects[index] = PriceVolumeHistoryStorage(
                    ticker=ticker, exchange=exchange, timestamp=timestamp,
                    index=index, value=int(index_value)
                )
                # add the saving of this object to the pipeline
                pipeline = data_history_objects[index].save(publish=True, pipeline=pipeline)

        database_response = pipeline.execute()
        return Response(
            {'success': f'{sum(database_response)} db entries created '
                        f'and TA subscribers received'},
            status=status.HTTP_201_CREATED
        )

    except Exception as e:
        logger.error(str(e))
        return Response({'error': str(e)}, status=status.HTTP_501_NOT_IMPLEMENTED)
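A client PUT against this view might look like the following. The URL path is hypothetical (routing not shown), and the field names follow the indexes the view reads from request.data:

# Hypothetical client usage; the URL path is an assumption (routing not shown).
import requests

payload = {
    "exchange": "binance",
    "timestamp": 1514765100,  # divisible by 300s (a 5min boundary)
    "open_price": "13400",
    "high_price": "13550",
    "low_price": "13390",
    "close_price": "13500",   # prices get scaled by 10**8 server-side
    "close_volume": "1200",
}
response = requests.put("https://example.com/api/ta/price-history/BTC_USDT", data=payload)
print(response.status_code, response.json())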
def generate_pv_storages(ticker: str, exchange: str, index: str, score: float) -> bool:
    """
    Resample values from PriceVolumeHistoryStorage into 5min periods
    in PriceStorage and VolumeStorage.

    :param ticker: eg. "ETH_BTC"
    :param exchange: eg. "binance"
    :param index: eg. "close_price"
    :param score: as defined by TimeseriesStorage.score_from_timestamp()
    :return: True if successful at generating a new storage index value for the score, else False
    """
    score = get_nearest_5min_score(score)
    timestamp = TimeseriesStorage.timestamp_from_score(score)

    if index in PRICE_INDEXES:
        storage = PriceStorage(ticker=ticker, exchange=exchange, timestamp=timestamp, index=index)
    elif index in VOLUME_INDEXES:
        storage = VolumeStorage(ticker=ticker, exchange=exchange, timestamp=timestamp, index=index)
    else:
        logger.error("I don't know what kind of index this is")
        return False

    # logger.debug(f'process price for ticker: {ticker} and index: {index}')
    # eg. key_format = f'{ticker}:{exchange}:PriceVolumeHistoryStorage:{index}'
    query_results = PriceVolumeHistoryStorage.query(
        ticker=ticker, exchange=exchange, index=index,
        timestamp=timestamp,
        periods_range=1, timestamp_tolerance=29
    )

    if not query_results['values_count']:
        return False

    # logger.debug("results from PriceVolumeHistoryStorage query... ")
    # logger.debug(query_results)

    try:
        index_values = [int(v) for v in query_results['values']]
        # timestamps = [int(v) for v in query_results['timestamps']]

        if not len(index_values):
            storage.value = None
        elif index == "close_volume":
            storage.value = index_values[-1]
        elif index == "open_price":
            storage.value = index_values[0]
        elif index == "low_price":
            storage.value = min(index_values)
        elif index == "high_price":
            storage.value = max(index_values)
        elif index == "close_price":
            storage.value = index_values[-1]
        else:
            raise IndexError("unknown index")

    except IndexError:
        # couldn't find a useful value or index
        # (sorry for overloading the word "index")
        return False
    except ValueError:
        return False  # couldn't find a useful value
    except Exception as e:
        logger.error("there's a bug here: " + str(e))
        return False

    if storage.value:
        storage.save(publish=bool(index == "close_price"))
        # logger.info("saved new thing: " + storage.get_db_key())

    if index == 'close_volume':
        # todo: for index in derived_volume_indexes: calculate and save volume index
        pass

    if index == "close_price":
        all_values_set = set(index_values)  # these are the close prices

        for other_index in ["open_price", "low_price", "high_price"]:
            query_results = PriceVolumeHistoryStorage.query(
                ticker=ticker, exchange=exchange, index=other_index,
                timestamp=timestamp,
                periods_range=1, timestamp_tolerance=29
            )
            index_values = [int(v) for v in query_results['values']]
            all_values_set = all_values_set | set(index_values)

        if not len(all_values_set):
            logger.error("This shouldn't be possible. Serious bug if you see this!")
            return False

        for d_index in derived_price_indexes:
            price_storage = PriceStorage(
                ticker=ticker, exchange=exchange, timestamp=timestamp, index=d_index
            )
            values_set = all_values_set.copy()

            if d_index == "midpoint_price":
                # trim extremes until one or two values remain, then take one
                while len(values_set) > 2:
                    values_set.remove(max(values_set))
                    values_set.remove(min(values_set))
                price_storage.value = values_set.pop()
            elif d_index == "mean_price":
                price_storage.value = sum(values_set) / (len(values_set) or 1)
            elif d_index == "price_variance":
                # this period size is too small to measure variance
                pass

            if price_storage.value:
                price_storage.save()

    return True
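Tying it together, the resampler might be invoked once per ticker, exchange, and index after a 5min boundary passes. A sketch using the names defined above; the driver loop itself, its index list, and its scheduling are assumptions, since the source does not show how generate_pv_storages is called in production:

# Hypothetical driver loop; which indexes are iterated and how this is
# scheduled are not shown in the source.
import time

score = TimeseriesStorage.score_from_timestamp(time.time())

# close_price last, since it triggers the derived-index calculations
for index in ["open_price", "high_price", "low_price", "close_volume", "close_price"]:
    ok = generate_pv_storages("ETH_BTC", "binance", index, score)
    if not ok:
        logger.warning(f"no 5min resample produced for ETH_BTC binance {index}")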