def info(msg: str, send_notification=False) -> None:
    if jh.app_mode() not in LOGGERS:
        _init_main_logger()

    msg = str(msg)
    from jesse.store import store

    log_id = jh.generate_unique_id()
    log_dict = {
        'id': log_id,
        'timestamp': jh.now_to_timestamp(),
        'message': msg
    }

    store.logs.info.append(log_dict)

    if jh.is_collecting_data() or jh.is_live():
        sync_publish('info_log', log_dict)

    if jh.is_live() or (jh.is_backtesting() and jh.is_debugging()):
        msg = f"[INFO | {jh.timestamp_to_time(jh.now_to_timestamp())[:19]}] {msg}"
        logger = LOGGERS[jh.app_mode()]
        logger.info(msg)

    if jh.is_live():
        from jesse.models.utils import store_log_into_db
        store_log_into_db(log_dict, 'info')

    if send_notification:
        notify(msg)
def error(msg: str) -> None:
    if jh.app_mode() not in LOGGERS:
        _init_main_logger()

    # error logs should be logged as info logs as well
    info(msg)

    msg = str(msg)
    from jesse.store import store

    log_id = jh.generate_unique_id()
    log_dict = {
        'id': log_id,
        'timestamp': jh.now_to_timestamp(),
        'message': msg
    }

    if jh.is_live() and jh.get_config('env.notifications.events.errors', True):
        # notify_urgently(f"ERROR at \"{jh.get_config('env.identifier')}\" account:\n{msg}")
        notify_urgently(f"ERROR:\n{msg}")
        notify(f'ERROR:\n{msg}')

    if (jh.is_backtesting() and jh.is_debugging()) or jh.is_collecting_data() or jh.is_live():
        sync_publish('error_log', log_dict)

    store.logs.errors.append(log_dict)

    if jh.is_live() or jh.is_optimizing():
        msg = f"[ERROR | {jh.timestamp_to_time(jh.now_to_timestamp())[:19]}] {msg}"
        logger = LOGGERS[jh.app_mode()]
        logger.error(msg)

    if jh.is_live():
        from jesse.models.utils import store_log_into_db
        store_log_into_db(log_dict, 'error')
def broadcast_error_without_logging(msg: str):
    msg = str(msg)

    sync_publish('error_log', {
        'id': jh.generate_unique_id(),
        'timestamp': jh.now_to_timestamp(),
        'message': msg
    })
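# --- Illustrative usage sketch (not part of the library) ---
# A minimal example of how the logging helpers defined above are meant to be called.
# It assumes an app/session context (app mode, store, notification drivers) has
# already been initialized by Jesse; the message strings are placeholders.
def _example_logging_usage() -> None:
    info('position opened')                                 # plain info log
    info('order filled at 42000', send_notification=True)   # info log + notification
    error('failed to submit order')                          # error log (also mirrored as an info log)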
def terminate_session():
    sync_publish('unexpectedTermination', {
        'message': "Session terminated as the result of an uncaught exception",
    })
    jesse_logger.error('Session terminated as the result of an uncaught exception')
    jh.terminate_app()
def run(self):
    try:
        mp.Process.run(self)
    except Exception as e:
        if type(e).__name__ == 'Termination':
            sync_publish('termination', {})
            jh.terminate_app()
        else:
            sync_publish('exception', {
                'error': f'{type(e).__name__}: {e}',
                'traceback': str(traceback.format_exc()),
            })
            print('Unhandled exception in the process:')
            print(traceback.format_exc())
            terminate_session()
def handle_thread_exception(args) -> None:
    if args.exc_type == SystemExit:
        return

    if args.exc_type.__name__ == 'Termination':
        sync_publish('termination', {})
        jh.terminate_app()
    else:
        # send notifications if it's a live session
        if jh.is_live():
            jesse_logger.error(f'{args.exc_type.__name__}: {args.exc_value}')
            jesse_logger.info(str(traceback.format_exc()))

        sync_publish('exception', {
            'error': f"{args.exc_type.__name__}: {str(args.exc_value)}",
            'traceback': str(traceback.format_exc())
        })
        terminate_session()
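# --- Illustrative sketch: wiring up the thread exception hook (assumption) ---
# `handle_thread_exception(args)` matches the hook signature used by Python 3.8+'s
# `threading.excepthook` (args carries exc_type, exc_value, exc_traceback, thread),
# so a custom exception handler would plausibly register it like this. Where Jesse
# actually performs this registration is not shown in this section; treat this as a sketch.
import threading


def _register_thread_exception_hook() -> None:
    threading.excepthook = handle_thread_exception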
def run(
        debug_mode,
        user_config: dict,
        routes: List[Dict[str, str]],
        extra_routes: List[Dict[str, str]],
        start_date: str,
        finish_date: str,
        candles: dict = None,
        chart: bool = False,
        tradingview: bool = False,
        full_reports: bool = False,
        csv: bool = False,
        json: bool = False
) -> None:
    if not jh.is_unit_testing():
        # at every second, we check to see if it's time to execute stuff
        status_checker = Timeloop()

        @status_checker.job(interval=timedelta(seconds=1))
        def handle_time():
            if process_status() != 'started':
                raise exceptions.Termination

        status_checker.start()

    from jesse.config import config, set_config
    config['app']['trading_mode'] = 'backtest'

    # debug flag
    config['app']['debug_mode'] = debug_mode

    # inject config
    if not jh.is_unit_testing():
        set_config(user_config)

    # set routes
    router.initiate(routes, extra_routes)

    store.app.set_session_id()

    register_custom_exception_handler()

    # clear the screen
    if not jh.should_execute_silently():
        click.clear()

    # validate routes
    validate_routes(router)

    # initiate candle store
    store.candles.init_storage(5000)

    # load historical candles
    if candles is None:
        candles = load_candles(start_date, finish_date)
        click.clear()

    if not jh.should_execute_silently():
        sync_publish('general_info', {
            'session_id': jh.get_session_id(),
            'debug_mode': str(config['app']['debug_mode']),
        })

        # candles info
        key = f"{config['app']['considering_candles'][0][0]}-{config['app']['considering_candles'][0][1]}"
        sync_publish('candles_info', stats.candles_info(candles[key]['candles']))

        # routes info
        sync_publish('routes_info', stats.routes(router.routes))

    # run backtest simulation
    simulator(candles, run_silently=jh.should_execute_silently())

    # hyperparameters (if any)
    if not jh.should_execute_silently():
        sync_publish('hyperparameters', stats.hyperparameters(router.routes))

    if not jh.should_execute_silently():
        if store.completed_trades.count > 0:
            sync_publish('metrics', report.portfolio_metrics())

            routes_count = len(router.routes)
            more = f"-and-{routes_count - 1}-more" if routes_count > 1 else ""
            study_name = f"{router.routes[0].strategy_name}-{router.routes[0].exchange}-{router.routes[0].symbol}-{router.routes[0].timeframe}{more}-{start_date}-{finish_date}"
            store_logs(study_name, json, tradingview, csv)

            if chart:
                charts.portfolio_vs_asset_returns(study_name)

            sync_publish('equity_curve', charts.equity_curve())

            # QuantStats' report
            if full_reports:
                price_data = []

                # load close candles for buy-and-hold and calculate pct_change
                for index, c in enumerate(config['app']['considering_candles']):
                    exchange, symbol = c[0], c[1]
                    if exchange in config['app']['trading_exchanges'] and symbol in config['app']['trading_symbols']:
                        # fetch from database
                        candles_tuple = Candle.select(
                            Candle.timestamp, Candle.close
                        ).where(
                            Candle.timestamp.between(jh.date_to_timestamp(start_date),
                                                     jh.date_to_timestamp(finish_date) - 60000),
                            Candle.exchange == exchange,
                            Candle.symbol == symbol
                        ).order_by(Candle.timestamp.asc()).tuples()

                        candles = np.array(candles_tuple)

                        timestamps = candles[:, 0]
                        price_data.append(candles[:, 1])

                price_data = np.transpose(price_data)
                price_df = pd.DataFrame(
                    price_data, index=pd.to_datetime(timestamps, unit="ms"), dtype=float
                ).resample('D').mean()
                price_pct_change = price_df.pct_change(1).fillna(0)
                bh_daily_returns_all_routes = price_pct_change.mean(1)
                quantstats.quantstats_tearsheet(bh_daily_returns_all_routes, study_name)
        else:
            sync_publish('equity_curve', None)
            sync_publish('metrics', None)

    # close database connection
    from jesse.services.db import database
    database.close_connection()
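# --- Illustrative sketch: calling the backtest run() above (assumptions) ---
# A minimal, hypothetical invocation of this entry point. The route dict keys
# ('exchange', 'symbol', 'timeframe', 'strategy') and the empty user_config are
# assumptions inferred from router.initiate() and the study_name built above;
# consult the actual Jesse config/docs for the authoritative format.
def _example_backtest_run() -> None:
    example_routes = [
        {'exchange': 'Binance', 'symbol': 'BTC-USDT', 'timeframe': '4h', 'strategy': 'ExampleStrategy'}
    ]
    run(
        debug_mode=False,
        user_config={},      # normally the full user config dict
        routes=example_routes,
        extra_routes=[],
        start_date='2021-01-01',
        finish_date='2021-06-01',
    )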
def simulator(
        candles: dict,
        run_silently: bool,
        hyperparameters: dict = None
) -> None:
    begin_time_track = time.time()
    key = f"{config['app']['considering_candles'][0][0]}-{config['app']['considering_candles'][0][1]}"
    first_candles_set = candles[key]['candles']
    length = len(first_candles_set)
    # to preset the array size for performance
    try:
        store.app.starting_time = first_candles_set[0][0]
    except IndexError:
        raise IndexError('Check your "warm_up_candles" config value')
    store.app.time = first_candles_set[0][0]

    # initiate strategies
    for r in router.routes:
        # if r.strategy_name is a string, read the strategy class from the file
        if isinstance(r.strategy_name, str):
            StrategyClass = jh.get_strategy_class(r.strategy_name)
        # else it is already a class object, so use it as is
        else:
            StrategyClass = r.strategy_name

        try:
            r.strategy = StrategyClass()
        except TypeError:
            raise exceptions.InvalidStrategy(
                "Looks like the structure of your strategy directory is incorrect. "
                "Make sure to include the strategy INSIDE the __init__.py file."
                "\nIf you need working examples, check out: https://github.com/jesse-ai/example-strategies"
            )
        except:
            raise

        r.strategy.name = r.strategy_name
        r.strategy.exchange = r.exchange
        r.strategy.symbol = r.symbol
        r.strategy.timeframe = r.timeframe

        # read the DNA from the strategy's dna() and use it for injecting hyperparameters.
        # first, convert the DNA string into hyperparameters
        if len(r.strategy.dna()) > 0 and hyperparameters is None:
            hyperparameters = jh.dna_to_hp(r.strategy.hyperparameters(), r.strategy.dna())

        # inject hyperparameters sent within the optimize mode
        if hyperparameters is not None:
            r.strategy.hp = hyperparameters

        # init a few objects that couldn't be initiated in Strategy's __init__.
        # it also injects hyperparameters into self.hp in case the route does not use any DNAs
        r.strategy._init_objects()

        selectors.get_position(r.exchange, r.symbol).strategy = r.strategy

    # add initial balance
    save_daily_portfolio_balance()

    progressbar = Progressbar(length, step=60)
    for i in range(length):
        # update time
        store.app.time = first_candles_set[i][0] + 60_000

        # add candles
        for j in candles:
            short_candle = candles[j]['candles'][i]
            if i != 0:
                previous_short_candle = candles[j]['candles'][i - 1]
                short_candle = _get_fixed_jumped_candle(previous_short_candle, short_candle)
            exchange = candles[j]['exchange']
            symbol = candles[j]['symbol']

            store.candles.add_candle(short_candle, exchange, symbol, '1m', with_execution=False,
                                     with_generation=False)

            # print short candle
            if jh.is_debuggable('shorter_period_candles'):
                print_candle(short_candle, True, symbol)

            _simulate_price_change_effect(short_candle, exchange, symbol)

            # generate and add candles for bigger timeframes
            for timeframe in config['app']['considering_timeframes']:
                # for 1m, no work is needed
                if timeframe == '1m':
                    continue

                count = jh.timeframe_to_one_minutes(timeframe)
                # until = count - ((i + 1) % count)

                if (i + 1) % count == 0:
                    # aggregate the last `count` 1m candles into one candle of this timeframe
                    generated_candle = generate_candle_from_one_minutes(
                        timeframe,
                        candles[j]['candles'][(i - (count - 1)):(i + 1)])

                    store.candles.add_candle(generated_candle, exchange, symbol, timeframe,
                                             with_execution=False, with_generation=False)

        # update progressbar
        if not run_silently and i % 60 == 0:
            progressbar.update()
            sync_publish('progressbar', {
                'current': progressbar.current,
                'estimated_remaining_seconds': progressbar.estimated_remaining_seconds
            })

        # now that all new generated candles are ready, execute
        for r in router.routes:
            count = jh.timeframe_to_one_minutes(r.timeframe)
            # 1m timeframe
            if r.timeframe == timeframes.MINUTE_1:
                r.strategy._execute()
            elif (i + 1) % count == 0:
                # print candle
                if jh.is_debuggable('trading_candles'):
                    print_candle(store.candles.get_current_candle(r.exchange, r.symbol, r.timeframe),
                                 False, r.symbol)
                r.strategy._execute()

        # now check to see if there's any MARKET orders waiting to be executed
        store.orders.execute_pending_market_orders()

        if i != 0 and i % 1440 == 0:
            save_daily_portfolio_balance()

    if not run_silently:
        # print executed time for the backtest session
        finish_time_track = time.time()
        sync_publish('alert', {
            'message': f'Successfully executed backtest simulation in: {round(finish_time_track - begin_time_track, 2)} seconds',
            'type': 'success'
        })

    for r in router.routes:
        r.strategy._terminate()
        store.orders.execute_pending_market_orders()

    # now that the backtest is finished, add the finishing balance
    save_daily_portfolio_balance()
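# --- Illustrative sketch: the candles dict shape simulator() expects (derived, not authoritative) ---
# Based on how simulator() indexes its input above (candles[key]['candles'],
# candles[j]['exchange'], candles[j]['symbol'], and the f"{exchange}-{symbol}" key),
# the dict appears to map "exchange-symbol" keys to per-route 1-minute candle arrays.
# The six-column candle layout (timestamp, open, close, high, low, volume) is Jesse's
# usual convention and is stated here as an assumption; the zeros are placeholders.
import numpy as np


def _example_candles_dict() -> dict:
    one_minute_candles = np.zeros((1440, 6))  # one day of placeholder 1m candles
    return {
        'Binance-BTC-USDT': {
            'exchange': 'Binance',
            'symbol': 'BTC-USDT',
            'candles': one_minute_candles,
        }
    }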
def run(exchange: str, symbol: str, start_date_str: str, skip_confirmation: bool = False, mode: str = 'candles') -> None:
    config['app']['trading_mode'] = mode

    # first, create and set session_id
    store.app.set_session_id()

    register_custom_exception_handler()

    # open database connection
    from jesse.services.db import database
    database.open_connection()

    # at every second, we check to see if it's time to execute stuff
    status_checker = Timeloop()

    @status_checker.job(interval=timedelta(seconds=1))
    def handle_time():
        if process_status() != 'started':
            raise exceptions.Termination

    status_checker.start()

    try:
        start_timestamp = jh.arrow_to_timestamp(arrow.get(start_date_str, 'YYYY-MM-DD'))
    except:
        raise ValueError(
            'start_date must be a string representing a date before today. ex: 2020-01-17'
        )

    # more start_date validations
    today = arrow.utcnow().floor('day').int_timestamp * 1000
    if start_timestamp == today:
        raise ValueError(
            "Today's date is not accepted. start_date must be a string representing a date BEFORE today."
        )
    elif start_timestamp > today:
        raise ValueError(
            "Future dates are not accepted. start_date must be a string representing a date BEFORE today."
        )

    # we call this just to throw an exception in case the symbol is missing a dash
    jh.quote_asset(symbol)

    click.clear()
    symbol = symbol.upper()

    until_date = arrow.utcnow().floor('day')
    start_date = arrow.get(start_timestamp / 1000)
    days_count = jh.date_diff_in_days(start_date, until_date)
    candles_count = days_count * 1440

    try:
        driver: CandleExchange = drivers[exchange]()
    except KeyError:
        raise ValueError(f'{exchange} is not a supported exchange')
    except TypeError:
        raise FileNotFoundError('You are missing the "plugins.py" file')

    loop_length = int(candles_count / driver.count) + 1

    # ask for confirmation
    if not skip_confirmation:
        click.confirm(
            f'Importing {days_count} days candles from "{exchange}" for "{symbol}". Duplicates will be skipped. All good?',
            abort=True, default=True)

    progressbar = Progressbar(loop_length)
    for i in range(candles_count):
        temp_start_timestamp = start_date.int_timestamp * 1000
        temp_end_timestamp = temp_start_timestamp + (driver.count - 1) * 60000

        # to make sure it won't try to import candles from the future! LOL
        if temp_start_timestamp > jh.now_to_timestamp():
            break

        # prevent duplicate calls to boost performance
        count = Candle.select().where(
            Candle.timestamp.between(temp_start_timestamp, temp_end_timestamp),
            Candle.symbol == symbol,
            Candle.exchange == exchange).count()
        already_exists = count == driver.count

        if not already_exists:
            # these are today's candles if temp_end_timestamp > now, so cap the end timestamp
            if temp_end_timestamp > jh.now_to_timestamp():
                temp_end_timestamp = arrow.utcnow().floor('minute').int_timestamp * 1000 - 60000

            # fetch from market
            candles = driver.fetch(symbol, temp_start_timestamp)

            # check if candles have been returned and that they start with the right timestamp.
            # Sometimes exchanges just return the earliest possible candles if the start date doesn't exist.
            if not len(candles) or arrow.get(candles[0]['timestamp'] / 1000) > start_date:
                click.clear()
                first_existing_timestamp = driver.get_starting_time(symbol)

                # if the driver can't provide an accurate get_starting_time()
                if first_existing_timestamp is None:
                    raise CandleNotFoundInExchange(
                        f'No candles exist in the market for this day: {jh.timestamp_to_time(temp_start_timestamp)[:10]} \n'
                        'Try another start_date'
                    )

                # handle when there are missing candles during the period
                if temp_start_timestamp > first_existing_timestamp:
                    # see if there are candles for the same date on the backup exchange;
                    # if so, get those; if not, download from that exchange.
                    if driver.backup_exchange is not None:
                        candles = _get_candles_from_backup_exchange(
                            exchange, driver.backup_exchange, symbol,
                            temp_start_timestamp, temp_end_timestamp)
                else:
                    temp_start_time = jh.timestamp_to_time(temp_start_timestamp)[:10]
                    temp_existing_time = jh.timestamp_to_time(first_existing_timestamp)[:10]
                    sync_publish('alert', {
                        'message': f'No candle exists in the market for {temp_start_time}. So '
                                   f'Jesse started importing since the first existing date which is {temp_existing_time}',
                        'type': 'success'
                    })
                    run(exchange, symbol, jh.timestamp_to_time(first_existing_timestamp)[:10], True)
                    return

            # fill absent candles (if there are any)
            candles = _fill_absent_candles(candles, temp_start_timestamp, temp_end_timestamp)

            # store in the database
            if skip_confirmation:
                store_candles(candles)
            else:
                threading.Thread(target=store_candles, args=[candles]).start()

        # shift start_date forward by the driver's batch size (in minutes)
        start_date = start_date.shift(minutes=driver.count)
        progressbar.update()
        sync_publish('progressbar', {
            'current': progressbar.current,
            'estimated_remaining_seconds': progressbar.estimated_remaining_seconds
        })

        # sleep so that the exchange won't get angry at us
        if not already_exists:
            time.sleep(driver.sleep_time)

    # stop the status_checker time loop
    status_checker.stop()

    sync_publish('alert', {
        'message': f'Successfully imported candles since {jh.timestamp_to_date(start_timestamp)} until today ({days_count} days). ',
        'type': 'success'
    })

    # if confirmation is skipped, this is being called from another process, hence we should leave the database be
    if not skip_confirmation:
        # close database connection
        from jesse.services.db import database
        database.close_connection()
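# --- Illustrative sketch: invoking the candle importer above (assumptions) ---
# A minimal, hypothetical call to the import-candles run() defined above. The exchange
# name and symbol format are assumptions and must match the keys available in `drivers`;
# skip_confirmation=True bypasses the interactive click.confirm() prompt.
def _example_import_candles() -> None:
    run(
        exchange='Binance',
        symbol='BTC-USDT',
        start_date_str='2021-01-01',
        skip_confirmation=True,
    )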