def get_current_data(self, counter: int = 0) -> Dict[str, Union[str, float]]:
    """
    Retrieves current market dictionary with open, high, low, close prices.

    Retries forever on connectivity errors, backing off a little longer each
    attempt. Implemented as a loop rather than recursion so a long outage
    cannot exhaust the call stack (the original recursive retry would raise
    RecursionError after roughly 1000 consecutive failures).

    :param counter: Counter to check how many times bot is trying to retrieve current data.
    :return: A dictionary with current open, high, low, and close prices.
    """
    while True:
        try:
            self.remove_past_data_if_needed()
            if not self.data_is_updated():
                self.update_data()

            # The still-open candle starts one interval after the latest
            # fully-closed candle we hold; its end bound is exclusive, hence -1.
            interval_minutes = self.get_interval_minutes()
            current_interval = self.data[-1]['date_utc'] + timedelta(minutes=interval_minutes)
            current_timestamp = int(current_interval.timestamp() * 1000)
            next_interval = current_interval + timedelta(minutes=interval_minutes)
            next_timestamp = int(next_interval.timestamp() * 1000) - 1

            current_data = self.binanceClient.get_klines(
                symbol=self.symbol,
                interval=self.interval,
                startTime=current_timestamp,
                endTime=next_timestamp,
            )[0]

            self.current_values = get_normalized_data(data=current_data, date_in_utc=current_interval)
            if counter > 0:
                self.try_callback("Successfully reconnected.")
            return self.current_values
        except Exception as e:
            sleep_time = 5 + counter * 2  # Linear backoff between attempts.
            error_message = f"Error: {e}. Retrying in {sleep_time} seconds..."
            self.output_message(error_message, 4)
            self.try_callback(f"Internet connectivity issue detected. Trying again in {sleep_time} seconds.")
            self.ema_dict = {}  # Reset EMA cache as it could be corrupted.
            time.sleep(sleep_time)
            counter += 1
def insert_data(self, new_data: List[List[Union[str, datetime]]]):
    """
    Inserts data from new_data to run-time data.

    :param new_data: List with new data values.
    """
    for data in new_data:
        # Build a converted copy instead of assigning to data[0], so the
        # caller's nested lists are not mutated as a side effect.
        row = [self.get_utc_datetime_from_timestamp(data[0]), *data[1:]]
        current_dict = get_normalized_data(data=row)
        self.data.append(current_dict)
def insert_data(self, newData: List[List[str]]):
    """
    Inserts data from newData to run-time data.

    :param newData: List with new data values.
    """
    for row in newData:
        # First column is a millisecond epoch timestamp; make it an aware UTC datetime.
        utc_date = datetime.fromtimestamp(int(row[0]) / 1000, tz=timezone.utc)
        self.data.append(get_normalized_data(data=row, date_in_utc=utc_date))
def get_rows():
    """
    Fetch every row from the database table, ordered by date, and return
    the rows normalized into dictionaries.
    """
    query = f"SELECT * FROM {DATABASE_TABLE} ORDER BY date_utc"
    with closing(sqlite3.connect(DATABASE_FILE_PATH)) as connection, \
            closing(connection.cursor()) as cursor:
        fetched_rows = cursor.execute(query).fetchall()
    return [get_normalized_data(row, parse_date=True) for row in fetched_rows]
def get_normalized_csv_data() -> List[Dict[str, Union[float, datetime]]]:
    """
    Get normalized CSV data in typical Algobot fashion.

    :return: Normalized list of dictionaries.
    """
    # Each CSV line is a comma-separated candle row; normalize each one.
    return [
        get_normalized_data(line.split(','), parse_date=True)
        for line in get_csv_data(headers=False)
    ]
def get_latest_database_row(self) -> Dict[str, Union[float, datetime]]:
    """
    Returns the latest row from database table.

    :return: Latest row data in a dictionary, or an empty dict when the
        table has no rows.
    """
    query = f'SELECT * FROM {self.database_table} ORDER BY date_utc DESC LIMIT 1'
    with closing(sqlite3.connect(self.database_file)) as connection:
        with closing(connection.cursor()) as cursor:
            row = cursor.execute(query).fetchone()

    if row is None:
        return {}
    return get_normalized_data(row, parse_date=True)
def snoop(self):
    """
    Run snooper functionality.

    Gathers volatility for every ticker in self.tickers, widening the
    historical lookback window per ticker until enough candles exist or the
    exchange has no more data, and returns a dict of ticker -> volatility
    (or an explanatory string when data is insufficient).
    """
    self.validate()
    self.signals.activity.emit('Starting the volatility snooper...')
    volatility_dict = {}
    for index, ticker in enumerate(self.tickers):
        if not self.running:
            # Snooper was stopped externally; return what we have so far.
            break
        self.signals.activity.emit(f"Gathering volatility for {ticker}...")
        self.signals.progress.emit(int(index / len(self.tickers) * 100))
        data = self.binanceClient.get_historical_klines(
            ticker, self.short_interval, self.get_starting_timestamp())
        data_length = len(data)
        multiplier = 2
        impossible = False
        # Need at least periods + 1 candles; keep pushing the starting
        # timestamp further back until we have enough or growth stalls.
        while len(data) < self.periods + 1:
            starting_timestamp = self.get_starting_timestamp(
                multiplier=multiplier)
            data = self.binanceClient.get_historical_klines(
                ticker, self.short_interval, starting_timestamp)
            multiplier += 1
            if len(data) == data_length:
                # Window widened but row count didn't grow: no more history
                # exists for this ticker (presumably it is too new).
                impossible = True
                break
            data_length = len(data)
        if impossible:
            volatility_dict[
                ticker] = "Not enough data. Maybe the ticker is too new."
        else:
            data = [get_normalized_data(d) for d in data]
            volatility_dict[ticker] = self.volatility_func(
                periods=self.periods, data=data)
    return volatility_dict
def get_data_from_database(self):
    """
    Loads data from database and appends it to run-time data.
    """
    query = f'''
            SELECT "date_utc", "open_price", "high_price", "low_price", "close_price",
            "volume", "quote_asset_volume", "number_of_trades", "taker_buy_base_asset",
            "taker_buy_quote_asset"
            FROM {self.databaseTable} ORDER BY date_utc
            '''
    with closing(sqlite3.connect(self.databaseFile)) as connection:
        with closing(connection.cursor()) as cursor:
            rows = cursor.execute(query).fetchall()

    if not rows:
        self.output_message("No data found in database.")
        return

    self.output_message("Retrieving data from database...")
    for row in rows:
        # Stored dates are naive strings; parse and mark them as UTC.
        date_utc = datetime.strptime(row[0], '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc)
        self.data.append(get_normalized_data(data=row, date_in_utc=date_utc))
def get_data_from_database(
        self, limit: int = None) -> List[Dict[str, Union[float, datetime]]]:
    """
    Loads data from database and appends it to run-time data.

    :param limit: Limit amount of rows to fetch.
    :return: Data from database in a list of dictionaries.
    """
    query = f'SELECT * FROM {self.database_table} ORDER BY date_utc'
    if limit is not None:
        query += f' DESC LIMIT {limit}'

    with closing(sqlite3.connect(self.database_file)) as connection:
        with closing(connection.cursor()) as cursor:
            rows = cursor.execute(query).fetchall()

    if limit is not None:
        # Reverse data because we want latest dates in the end.
        rows.reverse()
    return [get_normalized_data(data=row, parse_date=True) for row in rows]
def test_get_normalized_data(data, expected):
    """ Test get normalized data functionality. """
    result = get_normalized_data(data)
    assert result == expected, f"Expected normalized data to be: {expected}."
def test_get_normalized_data(data, date_in_utc, expected):
    """ Test get normalized data functionality with an explicit UTC date. """
    result = get_normalized_data(data, date_in_utc)
    assert result == expected, f"Expected normalized data to be: {expected}."