def store_candle_into_db(exchange: str, symbol: str, candle: np.ndarray, on_conflict='ignore') -> None:
    """
    Stores a single 1m candle into the database.

    :param exchange: exchange name
    :param symbol: trading symbol
    :param candle: numpy array ordered as [timestamp, open, close, high, low, volume]
    :param on_conflict: 'ignore' | 'replace' | 'error' — what to do when a row with the
        same (timestamp, symbol, exchange) already exists
    :raises ValueError: if on_conflict is not one of the supported values
    """
    from jesse.models.Candle import Candle

    d = {
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': exchange,
        'timestamp': candle[0],
        # NOTE: the input array is ordered [ts, open, close, high, low, volume],
        # hence the non-sequential indices below
        'open': candle[1],
        'high': candle[3],
        'low': candle[4],
        'close': candle[2],
        'volume': candle[5]
    }

    if on_conflict == 'ignore':
        Candle.insert(**d).on_conflict_ignore().execute()
    elif on_conflict == 'replace':
        # overwrite the OHLCV columns of the existing row
        Candle.insert(**d).on_conflict(
            conflict_target=['timestamp', 'symbol', 'exchange'],
            preserve=(Candle.open, Candle.high, Candle.low, Candle.close, Candle.volume),
        ).execute()
    elif on_conflict == 'error':
        Candle.insert(**d).execute()
    else:
        # ValueError is more precise than a bare Exception and stays
        # backward-compatible for callers catching Exception
        raise ValueError(f'Unknown on_conflict value: {on_conflict}')
def __init__(self):
    # unique id for this strategy execution instance
    self.id = jh.generate_unique_id()

    # identity fields — NOTE(review): presumably filled in by the framework
    # after construction; confirm against the caller
    self.name = None
    self.symbol = None
    self.exchange = None
    self.timeframe = None
    # hyperparameters — None unless injected (e.g. by the optimize mode)
    self.hp = None
    # candle index counter
    self.index = 0
    # free-form storage for the strategy author
    self.vars = {}

    # entry/exit price targets, each with a private shadow copy
    self.buy = None
    self._buy = None
    self.sell = None
    self._sell = None
    self.stop_loss = None
    self._stop_loss = None
    self.take_profit = None
    self._take_profit = None
    # internal logged copies — purpose not evident from this block; confirm
    self._log_take_profit = None
    self._log_stop_loss = None

    # order bookkeeping lists
    self._open_position_orders = []
    self._stop_loss_orders = []
    self._take_profit_orders = []

    # trade state
    self.trade = None
    self.trades_count = 0
    self._initial_qty = None
    self._is_executing = False
    self._is_initiated = False

    # set externally before execution starts — confirm
    self.position = None
    self.broker = None
def store_candle_into_db(exchange: str, symbol: str, candle: np.ndarray):
    """
    Persists a single 1m candle into the database on a background thread.

    The candle array is ordered [timestamp, open, close, high, low, volume].
    """
    record = {
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': exchange,
        'timestamp': candle[0],
        'open': candle[1],
        'high': candle[3],
        'low': candle[4],
        'close': candle[2],
        'volume': candle[5]
    }

    def async_save():
        # duplicate rows are silently skipped
        Candle.insert(**record).on_conflict_ignore().execute()
        print(
            jh.color(
                'candle: {}-{}-{}: {}'.format(jh.timestamp_to_time(record['timestamp']), exchange, symbol, candle),
                'blue'
            )
        )

    # fire-and-forget: do not block the caller on the DB write
    threading.Thread(target=async_save).start()
def fetch(self, symbol, start_timestamp):
    """
    Fetches 1m candles for `symbol` starting at `start_timestamp` (milliseconds).

    Since the Bitfinex API skips candles with "volume=0", we have to send an
    end timestamp instead of a limit. Therefore, we use the limit number
    (self.count) to calculate the end_timestamp.

    :return: list of candle dicts
    """
    end_timestamp = start_timestamp + (self.count - 1) * 60000
    payload = {
        'start': start_timestamp,
        'end': end_timestamp,
        'limit': self.count,
        'sort': 1
    }

    response = requests.get(self.endpoint + '/trade:1m:t{}/hist'.format(symbol), params=payload)
    data = response.json()

    # rows come back as [timestamp, open, close, high, low, volume];
    # a comprehension replaces the original append loop (same output)
    return [{
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'timestamp': d[0],
        'open': d[1],
        'close': d[2],
        'high': d[3],
        'low': d[4],
        'volume': d[5]
    } for d in data]
def fetch(self, symbol: str, start_timestamp: int) -> list:
    """
    Fetches 1m candles from the FTX API starting at `start_timestamp` (ms).
    """
    # inclusive end time derived from the configured candle count
    end_timestamp = start_timestamp + (self.count - 1) * 60000

    # FTX expects seconds, not milliseconds
    payload = {
        'resolution': 60,
        'start_time': start_timestamp / 1000,
        'end_time': end_timestamp / 1000,
    }

    # FTX futures markets use a PERP suffix where our symbols use USD
    formatted_symbol = symbol.replace('USD', 'PERP')
    response = requests.get(
        f'https://ftx.com/api/markets/{formatted_symbol}/candles', params=payload)
    self._handle_errors(response)

    candles = []
    for row in response.json()['result']:
        candles.append({
            'id': jh.generate_unique_id(),
            'symbol': symbol,
            'exchange': self.name,
            'timestamp': int(row['time']),
            'open': float(row['open']),
            'close': float(row['close']),
            'high': float(row['high']),
            'low': float(row['low']),
            'volume': float(row['volume'])
        })
    return candles
def fetch(self, symbol: str, start_timestamp: int):
    """
    Fetches 1m candles for `symbol` starting at `start_timestamp` (ms).

    note1: unlike Bitfinex, this API does NOT skip candles with volume=0.
    note2: like Bitfinex, start_time includes the candle and so does the end_time.

    NOTE(review): the original docstring said "Binance", but the payload
    ('granularity', ISO-formatted 'start'/'end') and the [time, low, high,
    open, close, volume] row order match a Coinbase-style API — likely a
    copy-pasted docstring; confirm which exchange this driver targets.
    """
    end_timestamp = start_timestamp + (self.count - 1) * 60000
    payload = {
        'granularity': '60',
        'start': jh.timestamp_to_time(start_timestamp),
        'end': jh.timestamp_to_time(end_timestamp),
    }

    response = requests.get(self.endpoint + '/{}/candles'.format(symbol), params=payload)
    self._handle_errors(response)
    data = response.json()

    # rows: [time(sec), low, high, open, close, volume] — hence the index order
    return [{
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'timestamp': int(d[0]) * 1000,
        'open': float(d[3]),
        'close': float(d[4]),
        'high': float(d[2]),
        'low': float(d[1]),
        'volume': float(d[5])
    } for d in data]
def info(msg: str, send_notification=False) -> None:
    """
    Logs an info message: appends it to the in-memory log store, publishes it
    when collecting data or live, writes it to the configured logger, persists
    it to the DB in live mode, and optionally sends a notification.
    """
    # lazily create the logger for the current app mode
    if jh.app_mode() not in LOGGERS:
        _init_main_logger()

    msg = str(msg)
    from jesse.store import store

    log_id = jh.generate_unique_id()
    log_dict = {
        'id': log_id,
        'timestamp': jh.now_to_timestamp(),
        'message': msg
    }

    store.logs.info.append(log_dict)

    if jh.is_collecting_data() or jh.is_live():
        sync_publish('info_log', log_dict)

    # only write to the file/stream logger in live mode or debug backtests
    if jh.is_live() or (jh.is_backtesting() and jh.is_debugging()):
        msg = f"[INFO | {jh.timestamp_to_time(jh.now_to_timestamp())[:19]}] {msg}"
        logger = LOGGERS[jh.app_mode()]
        logger.info(msg)

    if jh.is_live():
        from jesse.models.utils import store_log_into_db
        store_log_into_db(log_dict, 'info')

    if send_notification:
        notify(msg)
def store_trade_into_db(exchange: str, symbol: str, trade: np.ndarray):
    """
    Persists an aggregated trade row into the database on a background thread.

    The trade array is ordered
    [timestamp, price, buy_qty, sell_qty, buy_count, sell_count].
    """
    record = {
        'id': jh.generate_unique_id(),
        'timestamp': trade[0],
        'price': trade[1],
        'buy_qty': trade[2],
        'sell_qty': trade[3],
        'buy_count': trade[4],
        'sell_count': trade[5],
        'symbol': symbol,
        'exchange': exchange,
    }

    def async_save():
        # duplicates are silently skipped
        Trade.insert(**record).on_conflict_ignore().execute()
        print(
            jh.color(
                'trade: {}-{}-{}: {}'.format(
                    jh.timestamp_to_time(record['timestamp']), exchange, symbol, trade
                ),
                'green'
            )
        )

    # fire-and-forget: do not block the caller on the DB write
    threading.Thread(target=async_save).start()
def store_orderbook_into_db(exchange: str, symbol: str, orderbook: np.ndarray):
    """
    Persists an orderbook snapshot into the database on a background thread.

    The snapshot is serialized via `orderbook.dumps()`; `orderbook[0]` holds
    the ask side and `orderbook[1]` the bid side (best level at index 0).
    """
    record = {
        'id': jh.generate_unique_id(),
        'timestamp': jh.now_to_timestamp(),
        'data': orderbook.dumps(),
        'symbol': symbol,
        'exchange': exchange,
    }

    def async_save():
        Orderbook.insert(**record).on_conflict_ignore().execute()
        print(
            jh.color(
                'orderbook: {}-{}-{}: [{}, {}], [{}, {}]'.format(
                    jh.timestamp_to_time(record['timestamp']),
                    exchange,
                    symbol,
                    # best ask
                    orderbook[0][0][0], orderbook[0][0][1],
                    # best bid
                    orderbook[1][0][0], orderbook[1][0][1]
                ),
                'magenta'
            )
        )

    # fire-and-forget: do not block the caller on the DB write
    threading.Thread(target=async_save).start()
def error(msg: str) -> None:
    """
    Logs an error message: mirrors it into the info log, notifies in live mode,
    publishes it, appends it to the in-memory error store, writes it to the
    configured logger, and persists it to the DB in live mode.
    """
    # lazily create the logger for the current app mode
    if jh.app_mode() not in LOGGERS:
        _init_main_logger()

    # error logs should be logged as info logs as well
    info(msg)

    msg = str(msg)
    from jesse.store import store

    log_id = jh.generate_unique_id()
    log_dict = {
        'id': log_id,
        'timestamp': jh.now_to_timestamp(),
        'message': msg
    }

    # urgent + regular notification, but only in live mode and when enabled
    if jh.is_live() and jh.get_config('env.notifications.events.errors', True):
        # notify_urgently(f"ERROR at \"{jh.get_config('env.identifier')}\" account:\n{msg}")
        notify_urgently(f"ERROR:\n{msg}")
        notify(f'ERROR:\n{msg}')

    if (jh.is_backtesting() and jh.is_debugging()) or jh.is_collecting_data() or jh.is_live():
        sync_publish('error_log', log_dict)

    store.logs.errors.append(log_dict)

    if jh.is_live() or jh.is_optimizing():
        msg = f"[ERROR | {jh.timestamp_to_time(jh.now_to_timestamp())[:19]}] {msg}"
        logger = LOGGERS[jh.app_mode()]
        logger.error(msg)

    if jh.is_live():
        from jesse.models.utils import store_log_into_db
        store_log_into_db(log_dict, 'error')
def store_candles(candles: np.ndarray, exchange: str, symbol: str) -> None:
    """
    Stores candles in the database. The stored data can later be used for
    being fetched again via get_candles, or for running backtests on them.
    A common use case is importing candles from a CSV file so you can later
    use them for backtesting.
    """
    from jesse.services.db import store_candles as store_candles_from_list
    import jesse.helpers as jh

    # check if .env file exists
    if not jh.is_jesse_project():
        raise FileNotFoundError(
            'Invalid directory: ".env" file not found. To use Jesse inside notebooks, create notebooks inside the root of a Jesse project.'
        )

    # TODO: add validation for timeframe to make sure it's `1m`
    records = []
    for candle in candles:
        records.append({
            'id': jh.generate_unique_id(),
            'symbol': symbol,
            'exchange': exchange,
            'timestamp': candle[0],
            'open': candle[1],
            'close': candle[2],
            'high': candle[3],
            'low': candle[4],
            'volume': candle[5]
        })

    store_candles_from_list(records)
def stop_order(self, symbol: str, qty: float, price: float, side: str, role: str, flags: list) -> Order:
    """
    Creates a STOP order, registers it in the orders store, and returns it.

    :param symbol: trading symbol
    :param qty: order quantity (sign-adjusted for side via jh.prepare_qty)
    :param price: trigger price
    :param side: order side
    :param role: order role
    :param flags: execution flags, translated via self.get_exec_inst
    :return: the created Order
    """
    order = Order({
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'side': side,
        'type': order_types.STOP,
        'flag': self.get_exec_inst(flags),
        'qty': jh.prepare_qty(qty, side),
        'price': price,
        'role': role
    })

    store.orders.add_order(order)

    return order
def market_order(self, symbol: str, qty: float, current_price: float, side: str, role: str, flags: list) -> Order:
    """
    Creates a MARKET order, registers it in the orders store, queues it for
    execution, and returns it.

    :param symbol: trading symbol
    :param qty: order quantity (sign-adjusted for side via jh.prepare_qty)
    :param current_price: price the market order is recorded at
    :param side: order side
    :param role: order role
    :param flags: execution flags, translated via self.get_exec_inst
    :return: the created Order
    """
    order = Order({
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'side': side,
        'type': order_types.MARKET,
        'flag': self.get_exec_inst(flags),
        'qty': jh.prepare_qty(qty, side),
        'price': current_price,
        'role': role
    })

    store.orders.add_order(order)
    # market orders are executed immediately, so queue it for execution
    store.orders.to_execute.append(order)

    return order
def store_ticker_into_db(exchange: str, symbol: str, ticker: np.ndarray):
    """
    Persists a ticker row into the database on a background thread.

    The ticker array is ordered
    [timestamp, last_price, high_price, low_price, volume].
    """
    record = {
        'id': jh.generate_unique_id(),
        'timestamp': ticker[0],
        'last_price': ticker[1],
        'high_price': ticker[2],
        'low_price': ticker[3],
        'volume': ticker[4],
        'symbol': symbol,
        'exchange': exchange,
    }

    def async_save():
        # duplicates are silently skipped
        Ticker.insert(**record).on_conflict_ignore().execute()
        print(
            jh.color(
                'ticker: {}-{}-{}: {}'.format(
                    jh.timestamp_to_time(record['timestamp']), exchange, symbol, ticker),
                'yellow'))

    # fire-and-forget: do not block the caller on the DB write
    threading.Thread(target=async_save).start()
def _check_for_liquidations(candle: np.ndarray, exchange: str, symbol: str) -> None:
    """
    Checks whether the current candle crosses the open position's liquidation
    price and, if so, closes the position with a reduce-only market order.
    """
    p: Position = selectors.get_position(exchange, symbol)

    # nothing to do without a position
    if not p:
        return

    # for now, we only support the isolated mode:
    if p.mode != 'isolated':
        return

    if candle_includes_price(candle, p.liquidation_price):
        closing_order_side = jh.closing_side(p.type)

        # create the market order that is used as the liquidation order
        order = Order({
            'id': jh.generate_unique_id(),
            'symbol': symbol,
            'exchange': exchange,
            'side': closing_order_side,
            'type': order_types.MARKET,
            'flag': order_flags.REDUCE_ONLY,
            'qty': jh.prepare_qty(p.qty, closing_order_side),
            # filled at the bankruptcy price, not the liquidation price
            'price': p.bankruptcy_price,
            'role': order_roles.CLOSE_POSITION
        })

        store.orders.add_order(order)

        store.app.total_liquidations += 1

        logger.info(f'{p.symbol} liquidated at {p.liquidation_price}')

        # execute after the order is registered and the event is logged
        order.execute()
def fetch(self, symbol: str, start_timestamp: int) -> list:
    """
    Fetches 1m candles from Bitfinex starting at `start_timestamp` (ms).

    Since the Bitfinex API skips candles with "volume=0", an explicit end
    timestamp (derived from self.count) is sent instead of relying on limit
    alone.
    """
    # inclusive end time derived from the configured candle count
    end_timestamp = start_timestamp + (self.count - 1) * 60000
    payload = {
        'start': start_timestamp,
        'end': end_timestamp,
        'limit': self.count,
        'sort': 1
    }

    dashless_symbol = jh.dashless_symbol(symbol)
    response = requests.get(
        f"{self.endpoint}/trade:1m:t{dashless_symbol}/hist",
        params=payload
    )

    candles = []
    # rows come back as [timestamp, open, close, high, low, volume]
    for row in response.json():
        candles.append({
            'id': jh.generate_unique_id(),
            'symbol': symbol,
            'exchange': self.name,
            'timestamp': row[0],
            'open': row[1],
            'close': row[2],
            'high': row[3],
            'low': row[4],
            'volume': row[5]
        })
    return candles
def __init__(self, exchange_name: str, symbol: str, attributes=None) -> None:
    """
    Initializes a position for `symbol` on `exchange_name`.

    :param attributes: optional dict of attribute overrides applied last,
        so they win over the defaults set here
    """
    self.id = jh.generate_unique_id()

    # trade state — empty until a position is opened
    self.entry_price = None
    self.exit_price = None
    self.current_price = None
    self.qty = 0
    self.opened_at = None
    self.closed_at = None

    # TODO: self._mark_price = None

    self.exchange_name = exchange_name
    self.exchange: Exchange = selectors.get_exchange(self.exchange_name)
    self.symbol = symbol
    self.strategy = None

    # apply caller-supplied overrides
    if attributes is None:
        attributes = {}
    for key, value in attributes.items():
        setattr(self, key, value)
def store_trade_into_db(exchange: str, symbol: str, trade: np.ndarray) -> None:
    # NOTE(review): this early return disables trade storage entirely — every
    # statement below is unreachable. Presumably intentional (feature turned
    # off); confirm before removing either the return or the dead code.
    return
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': trade[0],
        'price': trade[1],
        'buy_qty': trade[2],
        'sell_qty': trade[3],
        'buy_count': trade[4],
        'sell_count': trade[5],
        'symbol': symbol,
        'exchange': exchange,
    }

    def async_save() -> None:
        Trade.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f'trade: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {trade}',
                'green'
            )
        )

    # async call
    threading.Thread(target=async_save).start()
def fetch(self, symbol, start_timestamp):
    """
    Fetches 1m candles for `symbol` since `start_timestamp` (ms).

    The API expects `since` in seconds and returns rows keyed by the pair name
    (via self._topair). Row indices used: d[0]=time(sec), d[1]=open, d[2]=high,
    d[3]=low, d[4]=close, d[6]=volume — d[5] is skipped (presumably vwap;
    confirm against the exchange docs).
    """
    payload = {
        'interval': '1',
        'pair': symbol,
        'since': start_timestamp / 1000,
    }

    response = requests.get(self.endpoint, params=payload)
    self._handle_errors(response)
    data = response.json()["result"][self._topair(symbol)]

    # a comprehension replaces the original append loop (same output)
    return [{
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'timestamp': int(d[0]) * 1000,
        'open': float(d[1]),
        'close': float(d[4]),
        'high': float(d[2]),
        'low': float(d[3]),
        'volume': float(d[6])
    } for d in data]
def save_daily_portfolio_balance() -> None: balances = [] # add exchange balances for key, e in store.exchanges.storage.items(): balances.append(e.assets[jh.app_currency()]) # store daily_balance of assets into database if jh.is_livetrading(): for asset_key, asset_value in e.assets.items(): store_daily_balance_into_db({ 'id': jh.generate_unique_id(), 'timestamp': jh.now(), 'identifier': jh.get_config('env.identifier', 'main'), 'exchange': e.name, 'asset': asset_key, 'balance': asset_value, }) # add open position values for key, pos in store.positions.storage.items(): if pos.is_open: balances.append(pos.pnl) total = sum(balances) store.app.daily_balance.append(total) logger.info('Saved daily portfolio balance: {}'.format(round(total, 2)))
def fake_order(attributes=None):
    """
    Builds an Order with random-but-plausible defaults; any key present in
    `attributes` overrides the corresponding default.
    """
    if attributes is None:
        attributes = {}

    # each generated order is timestamped one minute after the previous one
    global first_timestamp
    first_timestamp += 60000

    # draw price before qty to keep the RNG call order stable
    default_price = randint(40, 100)
    default_qty = randint(1, 10)

    return Order({
        "id": jh.generate_unique_id(),
        'symbol': attributes.get('symbol', 'BTCUSD'),
        'exchange': attributes.get('exchange', exchanges.SANDBOX),
        'side': attributes.get('side', sides.BUY),
        'type': attributes.get('type', order_types.LIMIT),
        'qty': attributes.get('qty', default_qty),
        'price': attributes.get('price', default_price),
        'status': attributes.get('status', order_statuses.ACTIVE),
        'created_at': attributes.get('created_at', first_timestamp),
    })
def broadcast_error_without_logging(msg: str):
    """
    Publishes an error message on the 'error_log' channel without recording
    it in the log store.
    """
    sync_publish('error_log', {
        'id': jh.generate_unique_id(),
        'timestamp': jh.now_to_timestamp(),
        'message': str(msg)
    })
def get_config(client_config: dict, has_live=False) -> dict:
    """
    Returns the application config, merging the client-side defaults with the
    config record stored in the database (creating that record on first run).

    :param client_config: default config sent from the client side
    :param has_live: whether the live-trade plugin is installed
    :return: {'data': merged_config}
    """
    from jesse.services.db import database
    database.open_connection()

    from jesse.models.Option import Option

    try:
        o = Option.get(Option.type == 'config')

        # merge it with client's config (because it could include new keys added),
        # update it in the database, and then return it
        data = jh.merge_dicts(client_config, json.loads(o.json))

        # make sure the list of BACKTEST exchanges is up to date
        from jesse.modes.import_candles_mode.drivers import drivers
        # iterate over a copy of the keys since we delete while iterating
        for k in list(data['backtest']['exchanges'].keys()):
            if k not in drivers:
                del data['backtest']['exchanges'][k]

        # make sure the list of LIVE exchanges is up to date
        if has_live:
            from jesse_live.info import SUPPORTED_EXCHANGES_NAMES
            live_exchanges = list(sorted(SUPPORTED_EXCHANGES_NAMES))
            for k in list(data['live']['exchanges'].keys()):
                if k not in live_exchanges:
                    del data['live']['exchanges'][k]

        # fix the settlement_currency of exchanges
        for k, e in data['live']['exchanges'].items():
            e['settlement_currency'] = jh.get_settlement_currency_from_exchange(
                e['name'])
        for k, e in data['backtest']['exchanges'].items():
            e['settlement_currency'] = jh.get_settlement_currency_from_exchange(
                e['name'])

        o.updated_at = jh.now()
        o.save()
    except peewee.DoesNotExist:
        # if not found, that means it's the first time. Store in the DB and
        # then return what was sent from the client side without changing it
        o = Option({
            'id': jh.generate_unique_id(),
            'updated_at': jh.now(),
            'type': 'config',
            'json': json.dumps(client_config)
        })
        o.save(force_insert=True)
        data = client_config

    database.close_connection()

    return {'data': data}
def store_candles(candles: np.ndarray, exchange: str, symbol: str) -> None:
    """
    Bulk-inserts the given candles (rows ordered
    [timestamp, open, close, high, low, volume]) for exchange/symbol.
    """
    records = []
    for candle in candles:
        records.append({
            'id': jh.generate_unique_id(),
            'symbol': symbol,
            'exchange': exchange,
            'timestamp': candle[0],
            'open': candle[1],
            'close': candle[2],
            'high': candle[3],
            'low': candle[4],
            'volume': candle[5]
        })

    store_candles_from_list(records)
def fetch(self, symbol, start_timestamp):
    """
    Fetches 1m candles for `symbol` starting at `start_timestamp` (ms).

    :return: list of candle dicts, or None for unhandled non-200 responses
        (NOTE(review): the implicit None return is preserved from the original
        behavior — callers must handle it)
    :raises exceptions.ExchangeInMaintenance: on HTTP 502
    :raises ValueError: on HTTP 400 (e.g. unsupported symbol)
    """
    end_timestamp = start_timestamp + (self.count - 1) * 60000

    payload = {
        'interval': '1m',
        'symbol': symbol,
        'startTime': start_timestamp,
        'endTime': end_timestamp,
        'limit': self.count,
    }

    response = requests.get(self.endpoint, params=payload)

    # Exchange In Maintenance
    if response.status_code == 502:
        raise exceptions.ExchangeInMaintenance(
            'ERROR: 502 Bad Gateway. Please try again later')
    # unsupported symbol
    if response.status_code == 400:
        raise ValueError(response.json()['msg'])
    if response.status_code != 200:
        return

    data = response.json()

    # rows: [openTime, open, high, low, close, volume, ...];
    # a comprehension replaces the original append loop (same output)
    return [{
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'timestamp': int(d[0]),
        'open': float(d[1]),
        'close': float(d[4]),
        'high': float(d[2]),
        'low': float(d[3]),
        'volume': float(d[5])
    } for d in data]
def fetch(self, symbol, start_timestamp):
    """
    Fetches 1m candles for `symbol` starting at `start_timestamp` (ms).

    note1: unlike Bitfinex, Binance does NOT skip candles with volume=0.
    note2: like Bitfinex, start_time includes the candle and so does the end_time.

    :return: list of candle dicts, or None for unhandled non-200 responses
        (NOTE(review): the implicit None return is preserved from the original
        behavior — callers must handle it)
    :raises exceptions.ExchangeInMaintenance: on HTTP 502
    :raises ValueError: on HTTP 400 (e.g. unsupported symbol)
    """
    end_timestamp = start_timestamp + (self.count - 1) * 60000

    # Binance expects the symbol without the dash separator
    dashless_symbol = jh.dashless_symbol(symbol)

    payload = {
        'interval': '1m',
        'symbol': dashless_symbol,
        'startTime': start_timestamp,
        'endTime': end_timestamp,
        'limit': self.count,
    }

    response = requests.get(self.endpoint, params=payload)

    # Exchange In Maintenance
    if response.status_code == 502:
        raise exceptions.ExchangeInMaintenance(
            'ERROR: 502 Bad Gateway. Please try again later')
    # unsupported symbol
    if response.status_code == 400:
        raise ValueError(response.json()['msg'])
    if response.status_code != 200:
        return

    data = response.json()

    # rows: [openTime, open, high, low, close, volume, ...];
    # a comprehension replaces the original append loop (same output)
    return [{
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'timestamp': int(d[0]),
        'open': float(d[1]),
        'close': float(d[4]),
        'high': float(d[2]),
        'low': float(d[3]),
        'volume': float(d[5])
    } for d in data]
def test_generate_unique_id():
    """Ensures jh.generate_unique_id() returns a canonical UUID4 string."""
    from uuid import UUID

    uuid_string = jh.generate_unique_id()

    try:
        val = UUID(uuid_string, version=4)
    except ValueError:
        # BUG FIX: the original `return False` silently PASSED under pytest,
        # since a test's return value is ignored — fail explicitly instead.
        raise AssertionError(
            f'not a valid hex code for a UUID: {uuid_string}') from None

    # If the uuid_string is a valid hex code but an invalid uuid4,
    # UUID.__init__ normalizes it to a valid uuid4, which is bad for
    # validation purposes — comparing the hex back against the input
    # catches that silent conversion.
    assert val.hex == uuid_string.replace('-', '')
def stop_order(self, symbol: str, qty: float, price: float, side: str, role: str, flags: list) -> Order:
    """
    Creates a STOP order, registers it in the orders store, and returns it.
    """
    order_attributes = {
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'side': side,
        'type': order_types.STOP,
        'flag': self.get_exec_inst(flags),
        'qty': jh.prepare_qty(qty, side),
        'price': price,
        'role': role
    }
    new_order = Order(order_attributes)

    store.orders.add_order(new_order)

    return new_order
def limit_order(self, symbol: str, qty: float, price: float, side: str, role: str, flags: list) -> Order:
    """
    Creates a LIMIT order, registers it in the orders store, and returns it.

    :param symbol: trading symbol
    :param qty: order quantity (sign-adjusted for side via jh.prepare_qty)
    :param price: limit price
    :param side: order side
    :param role: order role
    :param flags: execution flags, translated via self.get_exec_inst
    :return: the created Order
    """
    order = Order({
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'side': side,
        'type': order_types.LIMIT,
        'flag': self.get_exec_inst(flags),
        'qty': jh.prepare_qty(qty, side),
        'price': price,
        'role': role
    })

    store.orders.add_order(order)

    return order
def market_order(self, symbol: str, qty: float, current_price: float, side: str, role: str, flags: list) -> Order:
    """
    Creates a MARKET order, registers it in the orders store, queues it for
    execution, and returns it.
    """
    order_attributes = {
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': self.name,
        'side': side,
        'type': order_types.MARKET,
        'flag': self.get_exec_inst(flags),
        'qty': jh.prepare_qty(qty, side),
        'price': current_price,
        'role': role
    }
    new_order = Order(order_attributes)

    store.orders.add_order(new_order)
    # market orders execute immediately, so queue this one for execution
    store.orders.to_execute.append(new_order)

    return new_order