def run(): """ Runs migrations per each table and adds new fields in case they have not been added yet. Accepted action types: add, drop, rename, modify_type, allow_null, deny_null If actions type is 'rename', you must add new field with 'old_name' key. To make column to not nullable, you must clean all null value of columns. """ print('Running database migrations...') database.open_connection() # create migrator migrator = PostgresqlMigrator(database.db) # run migrations _candle(migrator) _completed_trade(migrator) _daily_balance(migrator) _log(migrator) _order(migrator) _orderbook(migrator) _ticker(migrator) _trade(migrator) # create initial tables from jesse.models import Candle, CompletedTrade, Log, Order, Option database.db.create_tables([Candle, CompletedTrade, Log, Order]) database.close_connection()
def get_config(client_config: dict, has_live=False) -> dict:
    from jesse.services.db import database
    database.open_connection()

    from jesse.models.Option import Option

    try:
        o = Option.get(Option.type == 'config')

        # merge it with the client's config (because it could include newly added keys),
        # update it in the database, and then return it
        data = jh.merge_dicts(client_config, json.loads(o.json))

        # make sure the list of BACKTEST exchanges is up to date
        from jesse.modes.import_candles_mode.drivers import drivers
        for k in list(data['backtest']['exchanges'].keys()):
            if k not in drivers:
                del data['backtest']['exchanges'][k]

        # make sure the list of LIVE exchanges is up to date
        if has_live:
            from jesse_live.info import SUPPORTED_EXCHANGES_NAMES
            live_exchanges = list(sorted(SUPPORTED_EXCHANGES_NAMES))
            for k in list(data['live']['exchanges'].keys()):
                if k not in live_exchanges:
                    del data['live']['exchanges'][k]

        # fix the settlement_currency of exchanges
        for k, e in data['live']['exchanges'].items():
            e['settlement_currency'] = jh.get_settlement_currency_from_exchange(e['name'])
        for k, e in data['backtest']['exchanges'].items():
            e['settlement_currency'] = jh.get_settlement_currency_from_exchange(e['name'])

        o.updated_at = jh.now()
        o.save()
    except peewee.DoesNotExist:
        # if not found, it's the first time. Store what was sent from the client
        # side in the DB and return it unchanged
        o = Option({
            'id': jh.generate_unique_id(),
            'updated_at': jh.now(),
            'type': 'config',
            'json': json.dumps(client_config)
        })
        o.save(force_insert=True)
        data = client_config

    database.close_connection()

    return {'data': data}
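# --- Illustrative sketch (not part of Jesse's codebase) ----------------------
# A rough sketch of the deep-merge behavior assumed for the jh.merge_dicts call
# above: values stored in the database override the client defaults, while keys
# that exist only in the (newer) client config survive the merge. This is an
# assumption for illustration, not Jesse's actual implementation.
def merge_dicts_sketch(base: dict, override: dict) -> dict:
    merged = dict(base)
    for key, value in override.items():
        if key in merged and isinstance(merged[key], dict) and isinstance(value, dict):
            merged[key] = merge_dicts_sketch(merged[key], value)
        else:
            merged[key] = value
    return merged


# usage: keys added on the client side are kept, stored values win otherwise
print(merge_dicts_sketch(
    {'backtest': {'logging': True}, 'new_key': 1},   # client_config
    {'backtest': {'logging': False}}                  # stored config
))
# {'backtest': {'logging': False}, 'new_key': 1}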
def get_candles(exchange: str, symbol: str, timeframe: str):
    from jesse.services.db import database
    database.open_connection()

    from jesse.services.candle import generate_candle_from_one_minutes
    from jesse.models.utils import fetch_candles_from_db

    symbol = symbol.upper()
    num_candles = 210
    one_min_count = jh.timeframe_to_one_minutes(timeframe)
    finish_date = jh.now(force_fresh=True)
    start_date = finish_date - (num_candles * one_min_count * 60_000)

    # fetch 1m candles from the database
    candles = np.array(fetch_candles_from_db(exchange, symbol, start_date, finish_date))

    # if there are no candles in the database, return []
    if candles.size == 0:
        database.close_connection()
        return []

    # drop leading candles until the first candle's timestamp lines up with the beginning of the timeframe
    timeframe_duration = one_min_count * 60_000
    while candles[0][0] % timeframe_duration != 0:
        candles = candles[1:]

    # generate bigger candles from the 1m candles
    if timeframe != '1m':
        generated_candles = []
        for i in range(len(candles)):
            if (i + 1) % one_min_count == 0:
                bigger_candle = generate_candle_from_one_minutes(
                    timeframe,
                    candles[(i - (one_min_count - 1)):(i + 1)],
                    True
                )
                generated_candles.append(bigger_candle)

        candles = generated_candles

    database.close_connection()

    return [{
        'time': int(c[0] / 1000),
        'open': c[1],
        'close': c[2],
        'high': c[3],
        'low': c[4],
        'volume': c[5],
    } for c in candles]
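# --- Illustrative sketch (not part of Jesse's codebase) ----------------------
# A minimal sketch of the aggregation generate_candle_from_one_minutes is
# expected to perform, given the column order used in the return value above:
# [timestamp, open, close, high, low, volume]. Not Jesse's actual implementation.
import numpy as np


def aggregate_one_minute_candles(one_min_candles: np.ndarray) -> np.ndarray:
    return np.array([
        one_min_candles[0][0],        # timestamp of the first 1m candle
        one_min_candles[0][1],        # open of the first 1m candle
        one_min_candles[-1][2],       # close of the last 1m candle
        one_min_candles[:, 3].max(),  # highest high
        one_min_candles[:, 4].min(),  # lowest low
        one_min_candles[:, 5].sum(),  # summed volume
    ])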
def update_config(client_config: dict):
    from jesse.services.db import database
    database.open_connection()

    from jesse.models.Option import Option

    # at this point there must already be one option record for "config", so:
    o = Option.get(Option.type == 'config')
    o.json = json.dumps(client_config)
    o.updated_at = jh.now()
    o.save()

    database.close_connection()
def run(
        debug_mode,
        user_config: dict,
        routes: List[Dict[str, str]],
        extra_routes: List[Dict[str, str]],
        start_date: str,
        finish_date: str,
        candles: dict = None,
        chart: bool = False,
        tradingview: bool = False,
        full_reports: bool = False,
        csv: bool = False,
        json: bool = False
) -> None:
    if not jh.is_unit_testing():
        # at every second, check whether it's time to execute periodic tasks
        status_checker = Timeloop()

        @status_checker.job(interval=timedelta(seconds=1))
        def handle_time():
            if process_status() != 'started':
                raise exceptions.Termination

        status_checker.start()

    from jesse.config import config, set_config
    config['app']['trading_mode'] = 'backtest'

    # debug flag
    config['app']['debug_mode'] = debug_mode

    # inject config
    if not jh.is_unit_testing():
        set_config(user_config)

    # set routes
    router.initiate(routes, extra_routes)

    store.app.set_session_id()

    register_custom_exception_handler()

    # clear the screen
    if not jh.should_execute_silently():
        click.clear()

    # validate routes
    validate_routes(router)

    # initiate candle store
    store.candles.init_storage(5000)

    # load historical candles
    if candles is None:
        candles = load_candles(start_date, finish_date)
        click.clear()

    if not jh.should_execute_silently():
        sync_publish('general_info', {
            'session_id': jh.get_session_id(),
            'debug_mode': str(config['app']['debug_mode']),
        })

        # candles info
        key = f"{config['app']['considering_candles'][0][0]}-{config['app']['considering_candles'][0][1]}"
        sync_publish('candles_info', stats.candles_info(candles[key]['candles']))

        # routes info
        sync_publish('routes_info', stats.routes(router.routes))

    # run backtest simulation
    simulator(candles, run_silently=jh.should_execute_silently())

    # hyperparameters (if any)
    if not jh.should_execute_silently():
        sync_publish('hyperparameters', stats.hyperparameters(router.routes))

    if not jh.should_execute_silently():
        if store.completed_trades.count > 0:
            sync_publish('metrics', report.portfolio_metrics())

            routes_count = len(router.routes)
            more = f"-and-{routes_count - 1}-more" if routes_count > 1 else ""
            study_name = f"{router.routes[0].strategy_name}-{router.routes[0].exchange}-{router.routes[0].symbol}-{router.routes[0].timeframe}{more}-{start_date}-{finish_date}"
            store_logs(study_name, json, tradingview, csv)

            if chart:
                charts.portfolio_vs_asset_returns(study_name)

            sync_publish('equity_curve', charts.equity_curve())

            # QuantStats' report
            if full_reports:
                price_data = []

                # load close candles for buy-and-hold and calculate pct_change
                for index, c in enumerate(config['app']['considering_candles']):
                    exchange, symbol = c[0], c[1]
                    if exchange in config['app']['trading_exchanges'] and symbol in config['app']['trading_symbols']:
                        # fetch from database
                        candles_tuple = Candle.select(
                            Candle.timestamp, Candle.close
                        ).where(
                            Candle.timestamp.between(
                                jh.date_to_timestamp(start_date),
                                jh.date_to_timestamp(finish_date) - 60000
                            ),
                            Candle.exchange == exchange,
                            Candle.symbol == symbol
                        ).order_by(Candle.timestamp.asc()).tuples()

                        candles = np.array(candles_tuple)

                        timestamps = candles[:, 0]
                        price_data.append(candles[:, 1])

                price_data = np.transpose(price_data)
                price_df = pd.DataFrame(
                    price_data,
                    index=pd.to_datetime(timestamps, unit="ms"),
                    dtype=float
                ).resample('D').mean()
                price_pct_change = price_df.pct_change(1).fillna(0)
                bh_daily_returns_all_routes = price_pct_change.mean(1)
                quantstats.quantstats_tearsheet(bh_daily_returns_all_routes, study_name)
        else:
            sync_publish('equity_curve', None)
            sync_publish('metrics', None)

    # close database connection
    from jesse.services.db import database
    database.close_connection()
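# --- Illustrative sketch (not part of Jesse's codebase) ----------------------
# A small, self-contained illustration of the buy-and-hold pipeline used for the
# QuantStats report above: 1m close prices are resampled to daily means, turned
# into daily percentage returns, then averaged across routes. Synthetic data only.
import numpy as np
import pandas as pd

timestamps = np.arange(0, 3 * 1440) * 60_000                  # three days of 1m timestamps (ms)
price_data = np.column_stack([
    np.linspace(100, 110, timestamps.size),                   # route 1 close prices
    np.linspace(50, 48, timestamps.size),                     # route 2 close prices
])

price_df = pd.DataFrame(
    price_data,
    index=pd.to_datetime(timestamps, unit='ms'),
    dtype=float
).resample('D').mean()
bh_daily_returns_all_routes = price_df.pct_change(1).fillna(0).mean(1)
print(bh_daily_returns_all_routes)  # one averaged buy-and-hold return per day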
def shutdown_event():
    from jesse.services.db import database
    database.close_connection()
def run(exchange: str, symbol: str, start_date_str: str, skip_confirmation: bool = False, mode: str = 'candles') -> None:
    config['app']['trading_mode'] = mode

    # first, create and set session_id
    store.app.set_session_id()

    register_custom_exception_handler()

    # open the database connection
    from jesse.services.db import database
    database.open_connection()

    # at every second, check whether it's time to execute periodic tasks
    status_checker = Timeloop()

    @status_checker.job(interval=timedelta(seconds=1))
    def handle_time():
        if process_status() != 'started':
            raise exceptions.Termination

    status_checker.start()

    try:
        start_timestamp = jh.arrow_to_timestamp(arrow.get(start_date_str, 'YYYY-MM-DD'))
    except:
        raise ValueError(
            'start_date must be a string representing a date before today. ex: 2020-01-17'
        )

    # more start_date validations
    today = arrow.utcnow().floor('day').int_timestamp * 1000
    if start_timestamp == today:
        raise ValueError(
            "Today's date is not accepted. start_date must be a string representing a date BEFORE today."
        )
    elif start_timestamp > today:
        raise ValueError(
            "Future dates are not accepted. start_date must be a string representing a date BEFORE today."
        )

    # we call this just to raise an exception in case the symbol is missing a dash
    jh.quote_asset(symbol)

    click.clear()
    symbol = symbol.upper()

    until_date = arrow.utcnow().floor('day')
    start_date = arrow.get(start_timestamp / 1000)
    days_count = jh.date_diff_in_days(start_date, until_date)
    candles_count = days_count * 1440

    try:
        driver: CandleExchange = drivers[exchange]()
    except KeyError:
        raise ValueError(f'{exchange} is not a supported exchange')
    except TypeError:
        raise FileNotFoundError('You are missing the "plugins.py" file')

    loop_length = int(candles_count / driver.count) + 1

    # ask for confirmation
    if not skip_confirmation:
        click.confirm(
            f'Importing {days_count} days of candles from "{exchange}" for "{symbol}". Duplicates will be skipped. All good?',
            abort=True, default=True
        )

    progressbar = Progressbar(loop_length)
    for i in range(candles_count):
        temp_start_timestamp = start_date.int_timestamp * 1000
        temp_end_timestamp = temp_start_timestamp + (driver.count - 1) * 60000

        # make sure it won't try to import candles from the future
        if temp_start_timestamp > jh.now_to_timestamp():
            break

        # prevent duplicate calls to boost performance
        count = Candle.select().where(
            Candle.timestamp.between(temp_start_timestamp, temp_end_timestamp),
            Candle.symbol == symbol,
            Candle.exchange == exchange
        ).count()
        already_exists = count == driver.count

        if not already_exists:
            # if the end of the window reaches past now (today's candles), cap it at the last closed minute
            if temp_end_timestamp > jh.now_to_timestamp():
                temp_end_timestamp = arrow.utcnow().floor('minute').int_timestamp * 1000 - 60000

            # fetch from the market
            candles = driver.fetch(symbol, temp_start_timestamp)

            # check that candles have been returned and that they start at the right timestamp.
            # Some exchanges just return the earliest possible candles when the requested start date doesn't exist.
            if not len(candles) or arrow.get(candles[0]['timestamp'] / 1000) > start_date:
                click.clear()

                first_existing_timestamp = driver.get_starting_time(symbol)

                # if the driver can't provide an accurate get_starting_time()
                if first_existing_timestamp is None:
                    raise CandleNotFoundInExchange(
                        f'No candles exist in the market for this day: {jh.timestamp_to_time(temp_start_timestamp)[:10]} \n'
                        'Try another start_date'
                    )

                # handle missing candles during the period
                if temp_start_timestamp > first_existing_timestamp:
                    # see if the backup exchange has candles for the same date; if so, use those
                    if driver.backup_exchange is not None:
                        candles = _get_candles_from_backup_exchange(
                            exchange, driver.backup_exchange, symbol,
                            temp_start_timestamp, temp_end_timestamp
                        )
                else:
                    temp_start_time = jh.timestamp_to_time(temp_start_timestamp)[:10]
                    temp_existing_time = jh.timestamp_to_time(first_existing_timestamp)[:10]
                    sync_publish('alert', {
                        'message': f'No candle exists in the market for {temp_start_time}. So '
                                   f'Jesse started importing from the first existing date, which is {temp_existing_time}',
                        'type': 'success'
                    })
                    run(exchange, symbol, jh.timestamp_to_time(first_existing_timestamp)[:10], True)
                    return

            # fill absent candles (if there are any)
            candles = _fill_absent_candles(candles, temp_start_timestamp, temp_end_timestamp)

            # store in the database
            if skip_confirmation:
                store_candles(candles)
            else:
                threading.Thread(target=store_candles, args=[candles]).start()

        # shift start_date forward by the driver's count (in minutes)
        start_date = start_date.shift(minutes=driver.count)

        progressbar.update()
        sync_publish('progressbar', {
            'current': progressbar.current,
            'estimated_remaining_seconds': progressbar.estimated_remaining_seconds
        })

        # sleep so that the exchange won't get angry at us
        if not already_exists:
            time.sleep(driver.sleep_time)

    # stop the status_checker time loop
    status_checker.stop()

    sync_publish('alert', {
        'message': f'Successfully imported candles from {jh.timestamp_to_date(start_timestamp)} until today ({days_count} days).',
        'type': 'success'
    })

    # if skip_confirmation is set, this was called from another process, so leave the database connection to the caller
    if not skip_confirmation:
        # close the database connection
        from jesse.services.db import database
        database.close_connection()
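# --- Illustrative sketch (not part of Jesse's codebase) ----------------------
# A hedged sketch of the gap-filling step above: _fill_absent_candles is
# expected to return one candle per minute between the two timestamps, forging
# flat zero-volume candles (priced at the last known close) for any minute the
# exchange did not return. This illustrates the idea only; it is not Jesse's
# actual implementation, and the candle dict keys beyond 'timestamp' are assumed.
def fill_absent_candles_sketch(candles: list, start_timestamp: int, end_timestamp: int) -> list:
    by_timestamp = {c['timestamp']: c for c in candles}
    filled = []
    last_close = candles[0]['close'] if candles else None

    for timestamp in range(start_timestamp, end_timestamp + 60_000, 60_000):
        if timestamp in by_timestamp:
            c = by_timestamp[timestamp]
            last_close = c['close']
        else:
            # forge a flat candle for the missing minute
            c = {
                'timestamp': timestamp,
                'open': last_close, 'close': last_close,
                'high': last_close, 'low': last_close,
                'volume': 0,
            }
        filled.append(c)

    return filled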
def terminate_app() -> None:
    # close the database connection
    from jesse.services.db import database
    database.close_connection()

    # exit the process immediately without running cleanup handlers
    os._exit(1)