def basetime(tf, timestamp):
    """Align a UNIX timestamp to the opening time of its timeframe.

    For intraday timeframes this is a plain integer flooring; weekly bars are
    aligned on the UTC Monday 00:00, monthly bars on the UTC first day of the
    month. Returns 0 for any unsupported timeframe above one week.
    """
    WEEK = 7 * 24 * 60 * 60
    MONTH = 30 * 24 * 60 * 60

    if tf <= 0.0:
        # null or negative timeframe: nothing to align
        return timestamp

    if tf < WEEK:
        # simplest case: floor on the timeframe duration
        return int(timestamp / tf) * tf

    if tf == WEEK:
        # must find the UTC first day of week
        dt = datetime.utcfromtimestamp(timestamp)
        dt = dt.replace(hour=0, minute=0, second=0, microsecond=0,
                        tzinfo=UTC()) - timedelta(days=dt.weekday())
        return dt.timestamp()

    if tf == MONTH:
        # replace by first day of month at 00h00 UTC
        dt = datetime.utcfromtimestamp(timestamp)
        dt = dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0,
                        tzinfo=UTC())
        return dt.timestamp()

    return 0
def parse_datetime(self, ts):
    """Convert a 'YYYY-mm-dd[ HH:MM:SS]' string to an integer UTC timestamp in milliseconds.

    A space in the input selects the date-time format, otherwise date only.
    """
    fmt = '%Y-%m-%d %H:%M:%S' if ' ' in ts else '%Y-%m-%d'
    dt = datetime.strptime(ts, fmt).replace(tzinfo=UTC())
    return int(dt.timestamp() * 1000)
def parse_tick(self, row):
    """Parse a CSV tick row 'YYYYmmdd HHMMSSmmm,bid,ask,volume'.

    Returns a (timestamp_ms, bid, ask, volume) tuple; prices stay strings.
    """
    cols = row.rstrip('\n').split(',')

    # the source gives milliseconds; pad to microseconds for %f
    dt = datetime.strptime(cols[0] + '000', '%Y%m%d %H%M%S%f').replace(tzinfo=UTC())

    return int(dt.timestamp() * 1000), cols[1], cols[2], cols[3]
def import_tick_mt5(self, broker_id, market_id, from_date, to_date, row):
    """Import a single MT5 tab-separated tick row into the database.

    Returns the number of stored rows: 1 on success, 0 when the tick falls
    outside of the requested [from_date, to_date] range.
    """
    cols = row.split('\t')

    dt = datetime.strptime(cols[0] + ' ' + cols[1], '%Y.%m.%d %H:%M:%S.%f').replace(tzinfo=UTC())
    timestamp = int(dt.timestamp() * 1000)

    # skip rows outside of the requested range
    if (from_date and dt < from_date) or (to_date and dt > to_date):
        return 0

    # an empty bid/ask column means "unchanged": keep the previous value
    if cols[2]:
        self.prev_bid = float(cols[2])
    if cols[3]:
        self.prev_ask = float(cols[3])

    ltv = float(cols[5]) if cols[5] else 0

    Database.inst().store_market_trade(
        (broker_id, market_id, timestamp, self.prev_bid, self.prev_ask, ltv))

    return 1
def get_historical_trades(self, symbol, from_date, to_date=None, limit=None):
    """Yield historical trades from the exchange as (timestamp_ms, price, price, size).

    Pages through the REST 'trade' endpoint from from_date until to_date (or
    until a short page signals the end). The 'start' offset skips trades that
    share the same second as the previous page's last trade, since the API
    timestamp filter is only second-precise.
    """
    trades = []  # NOTE(review): never filled; only the StopIteration value of this generator

    endpoint = "trade"  # quote"

    params = {
        'symbol': symbol,
        'reverse': 'false',
        'count': limit or 500,  # or max limit
        'start': 0
    }

    if to_date:
        params['endTime'] = self._format_datetime(to_date)

    start = 0
    last_datetime = from_date

    while 1:
        if last_datetime:
            params['startTime'] = self._format_datetime(last_datetime)
            params['start'] = start
            # reset for next query
            start = 0

        results = self.request(path=endpoint, query=params, verb='GET')

        for c in results:
            dt = self._parse_datetime(c['timestamp']).replace(tzinfo=UTC())

            if to_date and dt > to_date:
                break

            if dt < last_datetime:
                # ignore because cannot fetch more precise than second
                continue

            if int(dt.timestamp()) == int(last_datetime.timestamp()):
                # same second as the previous trade: grow the page offset
                start += 1

            yield (
                int(dt.timestamp() * 1000),  # integer ms
                c['price'],
                c['price'],
                c['size'])

            last_datetime = dt

        # stop past the upper bound or on a short (last) page
        if (to_date and last_datetime > to_date) or len(results) < 500:
            break

        time.sleep(0.5)  # don't exceed the API usage limit

    return trades
def __init__(self, watcher_service, trader_service, monitor_service, options):
    """Strategy service constructor.

    Wires the watcher/trader/monitor services together, loads the per-profile
    configuration files, and prepares backtesting/paper-mode state from the
    command-line options mapping.
    """
    super().__init__("strategy", options)

    # registries of loadable components, filled later at setup time
    self._strategies = {}
    self._indicators = {}
    self._appliances = {}
    self._tradeops = {}
    self._regions = {}

    self._watcher_service = watcher_service
    self._trader_service = trader_service
    self._monitor_service = monitor_service

    self._identity = options.get('identity', 'demo')
    self._report_path = options.get('reports-path', './')
    self._watcher_only = options.get('watcher-only', False)
    self._profile = options.get('profile', 'default')

    # configuration files (indicators/tradeops/regions/strategies + active profile)
    self._indicators_config = utils.load_config(options, 'indicators')
    self._tradeops_config = utils.load_config(options, 'tradeops')
    self._regions_config = utils.load_config(options, 'regions')
    self._strategies_config = utils.load_config(options, 'strategies')
    self._profile_config = utils.load_config(options, "profiles/%s" % self._profile)

    # backtesting options
    self._backtesting = options.get('backtesting', False)
    self._from_date = options.get('from')  # UTC tz
    self._to_date = options.get('to')  # UTC tz
    self._timestep = options.get('timestep', 60.0)
    self._timestamp = 0  # in backtesting current processed timestamp

    # cannot be more recent than now
    from common.utils import UTC
    today = datetime.now().astimezone(UTC())

    if self._from_date and self._from_date > today:
        self._from_date = today

    if self._to_date and self._to_date > today:
        self._to_date = today

    self._backtest = False
    self._start_ts = self._from_date.timestamp() if self._from_date else 0
    self._end_ts = self._to_date.timestamp() if self._to_date else 0
    self._timestep_thread = None
    self._time_factor = 0.0

    if self._backtesting:
        # can use the time factor in backtesting only
        self._time_factor = options.get('time-factor', 0.0)

    # paper mode options
    self._paper_mode = options.get('paper-mode', False)

    # monotonic key generator for appliances/trades
    self._next_key = 1

    # worker pool of jobs for running data analysis
    self._worker_pool = WorkerPool()
def parse_min(self, row):
    """Parse a ';'-separated minute OHLC row 'YYYYmmdd HHMMSS;o;h;l;c;v'.

    Returns (timestamp_ms, bid o/h/l/c, ask o/h/l/c, volume) with the single
    OHLC quadruplet duplicated for both bid and ask sides, matching the shape
    produced by the other parsers of this importer.
    """
    parts = row.rstrip('\n').split(';')

    # fix: the parsed datetime must be converted to an epoch value with
    # .timestamp() before scaling to ms — the original multiplied the
    # datetime object itself by 1000, which raises TypeError
    ts = int(
        datetime.strptime(parts[0], '%Y%m%d %H%M%S').replace(tzinfo=UTC())
        .timestamp() * 1000)

    return ts, parts[1], parts[2], parts[3], parts[4], parts[1], parts[
        2], parts[3], parts[4], parts[5]
def get_historical_candles(self, symbol, bin_size, from_date, to_date=None, limit=None):
    """Yield bucketed OHLC candles from the exchange.

    Time interval [1m,5m,1h,1d]. Each yielded tuple is
    (timestamp_ms, bid o/h/l/c, ask o/h/l/c, volume) with the single OHLC
    duplicated for both sides. Pages from from_date until to_date or a short
    (last) page.
    """
    candles = []  # NOTE(review): unused accumulator kept from an older non-generator version

    endpoint = "trade/bucketed"  # "quote/bucketed"

    if bin_size not in self.BIN_SIZE:
        raise ValueError("BitMex does not support bin size %s !" % bin_size)

    params = {
        'binSize': bin_size,
        'symbol': symbol,
        'reverse': 'false',
        'count': limit or 750,  # or max limit
        # 'start': 0
    }

    if to_date:
        params['endTime'] = self._format_datetime(to_date)

    last_datetime = from_date

    while 1:
        if last_datetime:
            params['startTime'] = self._format_datetime(last_datetime)

        results = self.request(path=endpoint, query=params, verb='GET')

        for c in results:
            dt = self._parse_datetime(c['timestamp']).replace(tzinfo=UTC())

            if to_date and dt > to_date:
                break

            yield (
                int(dt.timestamp() * 1000),  # integer ms
                c['open'], c['high'], c['low'], c['close'],
                c['open'], c['high'], c['low'], c['close'],
                c['volume'])

            last_datetime = dt

        # stop past the upper bound or on a short (last) page
        if (to_date and last_datetime > to_date) or len(results) < 750:
            break

        time.sleep(0.5)  # don't exceed the API usage limit
def get_historical_trades(self, symbol, from_date, to_date=None, limit=None):
    """Yield historical trades as (timestamp_ms, price, price, size).

    Pages through the REST 'trade' endpoint. Because the API timestamp filter
    is only second-precise, a 'start' offset is maintained to skip trades that
    share the same timestamp across pages, and the last trade id is tracked to
    drop the duplicate at each page boundary.
    """
    trades = []  # NOTE(review): never filled; only the StopIteration value of this generator

    endpoint = "trade"  # quote"

    params = {
        'symbol': symbol,
        'reverse': 'false',
        'count': limit or 500,  # or max limit
        'start': 0
    }

    if to_date:
        params['endTime'] = self._format_datetime(to_date)

    start = 0
    last_datetime = from_date
    last_trade_id = ""

    while 1:
        if last_datetime:
            params['startTime'] = self._format_datetime(last_datetime)
            params['start'] = start  # offset if timestamp are same

        results = self.request(path=endpoint, query=params, verb='GET')

        for c in results:
            dt = self._parse_datetime(c['timestamp']).replace(tzinfo=UTC())

            if to_date and dt > to_date:
                break

            if dt < last_datetime:
                # older than the cursor: count it in the page offset and skip
                start += 1
                continue

            if last_trade_id == c['trdMatchID']:
                # could be in case of the last trade of the prev query is the first of the current query
                continue

            # increase offset when similar timestamp, else reset
            if dt == last_datetime:
                start += 1
            else:
                start = 0

            yield (int(dt.timestamp()*1000),  # integer ms
                   c['price'],
                   c['price'],
                   c['size'])

            last_datetime = dt
            last_trade_id = c['trdMatchID']

        # stop past the upper bound or on a short (last) page
        if (to_date and last_datetime > to_date) or len(results) < 500:
            break

        time.sleep(1.0)  # don't exceed the API usage limit

    return trades
def get_historical_candles(self, symbol, bin_size, from_date, to_date=None, limit=None, partial=False):
    """Yield bucketed OHLC candles converted from close-time to open-time.

    Time interval [1m,5m,1h,1d]. BitMex stamps each bucket with its CLOSE
    time while the rest of this project works in OPEN time, so one bin-size
    duration is added to the request bounds and subtracted from each result.
    Yields (timestamp_ms, bid o/h/l/c, ask o/h/l/c, volume).
    """
    candles = []  # NOTE(review): unused accumulator kept from an older non-generator version

    endpoint = "trade/bucketed"  # "quote/bucketed"

    if bin_size not in self.BIN_SIZE:
        raise ValueError("BitMex does not support bin size %s !" % bin_size)

    params = {
        'binSize': bin_size,
        'symbol': symbol,
        'reverse': 'false',
        'count': limit or 750,  # or max limit
        # 'start': 0
    }

    if partial:
        # also return the current, still-forming bucket
        params['partial'] = True

    # because bitmex works in close time but we are in open time
    delta = self.BIN_SIZE_TO_TIMEFRAME[bin_size]

    if to_date:
        params['endTime'] = self._format_datetime(to_date + timedelta(seconds=delta))

    last_datetime = from_date + timedelta(seconds=delta)
    ot = from_date  # init

    while 1:
        if last_datetime:
            params['startTime'] = self._format_datetime(last_datetime)

        results = self.request(path=endpoint, query=params, verb='GET')

        for c in results:
            dt = self._parse_datetime(c['timestamp']).replace(tzinfo=UTC())

            # its close time, want open time
            ot = dt - timedelta(seconds=delta)

            if to_date and ot > to_date:
                break

            yield (int(ot.timestamp()*1000),  # integer ms
                   c['open'], c['high'], c['low'], c['close'],
                   c['open'], c['high'], c['low'], c['close'],
                   c['volume'])

            last_datetime = dt

        # stop past the upper bound or on a short (last) page
        if (to_date and ot > to_date) or len(results) < 750:
            break

        time.sleep(1.0)  # don't exceed the API usage limit
def __bufferize(self):
    """Refill the read-ahead buffer with the next chunk of OHLC from the database.

    Advances the query cursor to the timestamp of the last fetched row, or by
    one timeframe when the query returned nothing (to avoid re-querying the
    same empty window forever).
    """
    results = self.query(self._timeframe, self._curr_date, None, self._buffer_size, False)

    if results:
        self._buffer.extend(results)
        # NOTE(review): fromtimestamp() produces LOCAL time before the tzinfo
        # is overwritten with UTC — this only yields the intended instant on a
        # host running in UTC; confirm (other code paths use utcfromtimestamp)
        self._curr_date = datetime.fromtimestamp(
            results[-1].timestamp).replace(tzinfo=UTC())
    else:
        # empty window: step the cursor forward by one timeframe
        self._curr_date = self._curr_date + timedelta(
            seconds=self._timeframe)
def parse_datetime(formatted):
    """Parse an ISO-like UTC datetime string with a flexible precision.

    Accepts 'YYYY', 'YYYY-mm', 'YYYY-mm-dd', and with a 'T' separator
    'YYYY-mm-ddTHH', 'YYYY-mm-ddTHH:MM', 'YYYY-mm-ddTHH:MM:SS'; an optional
    trailing 'Z' is ignored (everything is treated as UTC anyway).

    Returns a tz-aware datetime, or None when empty/unparseable.
    """
    if not formatted:
        return None

    try:
        if formatted.endswith('Z'):  # always UTC
            formatted = formatted.rstrip('Z')

        if 'T' in formatted:
            colons = formatted.count(':')
            if colons == 2:
                return datetime.strptime(
                    formatted, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
            elif colons == 1:
                return datetime.strptime(
                    formatted, '%Y-%m-%dT%H:%M').replace(tzinfo=UTC())
            elif colons == 0:
                return datetime.strptime(
                    formatted, '%Y-%m-%dT%H').replace(tzinfo=UTC())
        else:
            dashes = formatted.count('-')
            if dashes == 2:
                return datetime.strptime(formatted, '%Y-%m-%d').replace(tzinfo=UTC())
            elif dashes == 1:
                return datetime.strptime(formatted, '%Y-%m').replace(tzinfo=UTC())
            elif dashes == 0:
                return datetime.strptime(formatted, '%Y').replace(tzinfo=UTC())
    except (ValueError, TypeError, AttributeError):
        # fix: was a bare 'except:' which also swallowed KeyboardInterrupt and
        # SystemExit; only parse/typing failures should map to None
        return None

    return None
def import_ohlc_siis_1_0_0(broker_id, market_id, timeframe, from_date, to_date, row):
    """Import a single SIIS 1.0.0 tab-separated OHLC row into the database.

    Returns 1 when the row was stored, 0 when it falls outside of the
    requested [from_date, to_date] range.
    """
    cols = row.split('\t')

    dt = datetime.strptime(cols[0], '%Y%m%d %H%M%S').replace(tzinfo=UTC())

    # discard rows outside of the requested range
    if (from_date and dt < from_date) or (to_date and dt > to_date):
        return 0

    ts_ms = int(dt.timestamp() * 1000)

    Database.inst().store_market_ohlc(
        (broker_id, market_id, ts_ms, int(timeframe), *cols[1:]))

    return 1
def import_ohlc_mt5(broker_id, market_id, timeframe, from_date, to_date, row):
    """Import a single MT5 tab-separated OHLC row into the database.

    The single OHLC quadruplet of the row is stored for both bid and ask
    sides. Returns 1 when stored, 0 when out of the requested range.
    """
    cols = row.split('\t')

    dt = datetime.strptime(cols[0] + ' ' + cols[1], '%Y.%m.%d %H:%M:%S').replace(tzinfo=UTC())

    # discard rows outside of the requested range
    if (from_date and dt < from_date) or (to_date and dt > to_date):
        return 0

    ohlc = (cols[2], cols[3], cols[4], cols[5])

    Database.inst().store_market_ohlc(
        (broker_id, market_id, int(dt.timestamp() * 1000), int(timeframe),
         *ohlc, *ohlc, cols[7]))

    return 1
def application(argv):
    """SIIS application entry point.

    Parses the command line, dispatches to the tool modes (fetch, binarize,
    optimize, sync, rebuild) or starts the full service stack and runs the
    interactive terminal main loop until 'q' is typed twice.

    Fix: the '--help' test was `arg == '--help' or '-h'`, which is always
    true ('-h' is a truthy string), so ANY unrecognized --option displayed
    the help and exited; it now tests membership properly.
    """
    fix_thread_set_name()

    # init terminal displayer
    Terminal()

    options = {
        'working-path': os.getcwd(),
        'identity': 'real',
        'config-path': './user/config',
        'log-path': './user/log',
        'reports-path': './user/reports',
        'markets-path': './user/markets',
        'log-name': 'siis.log',
    }

    # create initial siis data structure if necessary
    install(options)

    siis_log = SiisLog(options, Terminal().inst().style())
    siis_logger = logging.getLogger('siis')

    # parse process command line
    if len(argv) > 1:
        options['livemode'] = True

        # utc or local datetime ?
        for arg in argv:
            if arg.startswith('--'):
                if arg == '--paper-mode':
                    # livemode but in paper-mode
                    options['paper-mode'] = True
                elif arg == '--fetch':
                    # use the fetcher
                    options['fetch'] = True
                elif arg == '--binarize':
                    # use the binarizer
                    options['binarize'] = True
                elif arg == '--optimize':
                    # use the optimizer
                    options['optimize'] = True
                elif arg == '--sync':
                    # use the syncer
                    options['sync'] = True
                elif arg == '--rebuild':
                    # use the rebuilder
                    options['rebuild'] = True
                elif arg == '--install-market':
                    options['install-market'] = True
                elif arg == '--initial-fetch':
                    # do the initial OHLC fetch for watchers
                    options['initial-fetch'] = True
                elif arg == '--backtest':
                    # backtest mean always paper-mode
                    options['paper-mode'] = True
                    options['backtesting'] = True
                elif arg.startswith('--timestep='):
                    # backtesting timestep, default is 60 second
                    options['timestep'] = float(arg.split('=')[1])
                elif arg.startswith('--time-factor='):
                    # backtesting time-factor
                    options['time-factor'] = float(arg.split('=')[1])
                elif arg.startswith('--from='):
                    # if backtest from date (if omitted use whole data) date format is
                    # "yyyy-mm-ddThh:mm:ss", fetch, binarize, optimize from date
                    options['from'] = datetime.strptime(
                        arg.split('=')[1], '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
                elif arg.startswith('--to='):
                    # if backtest to date (can be omitted), fetch, binarize, optimize to date
                    options['to'] = datetime.strptime(
                        arg.split('=')[1], '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
                elif arg.startswith('--last='):
                    # fetch the last n data history
                    options['last'] = int(arg.split('=')[1])
                elif arg.startswith('--market='):
                    # fetch, binarize, optimize the data history for this market
                    options['market'] = arg.split('=')[1]
                elif arg.startswith('--spec='):
                    # fetcher data history option
                    options['option'] = arg.split('=')[1]
                elif arg.startswith('--broker='):
                    # broker name for fetcher, watcher, optimize, binarize
                    options['broker'] = arg.split('=')[1]
                elif arg.startswith('--timeframe='):
                    # fetch, binarize, optimize base timeframe
                    options['timeframe'] = arg.split('=')[1]
                elif arg.startswith('--cascaded='):
                    # fetch cascaded ohlc generation
                    options['cascaded'] = arg.split('=')[1]
                elif arg == '--watcher-only':
                    # feed only with live data (not compatible with --read-only)
                    options['watcher-only'] = True
                elif arg == '--read-only':
                    # does not write to the database (not compatible with --watcher-only)
                    options['read-only'] = True
                elif arg == '--check-data':
                    # check DB ohlc data (@todo)
                    options['check-data'] = True
                elif arg.startswith('--profile='):
                    # appliances profile name
                    options['profile'] = arg.split('=')[1]
                elif arg == '--version':
                    Terminal.inst().info('%s %s' % (APP_SHORT_NAME, '.'.join(
                        [str(x) for x in APP_VERSION])))
                    sys.exit(0)
                elif arg in ('--help', '-h'):
                    # fix: was `arg == '--help' or '-h'` (always true)
                    display_cli_help()
                    sys.exit(0)
            else:
                options['identity'] = argv[1]

        # watcher-only read-only mutual exclusion
        if options.get('watcher-only') and options.get('read-only'):
            Terminal.inst().error(
                "Options --watcher-only and --read-only are mutually exclusive !")
            sys.exit(-1)

        # backtesting
        if options.get('backtesting', False):
            if options.get('from') is None or options.get('to') is None:
                del options['backtesting']
                Terminal.inst().error("Backtesting need from= and to= date time")
                sys.exit(-1)

    if options['identity'].startswith('-'):
        Terminal.inst().error("First option must be the identity name")

    #
    # binarizer mode
    #

    if options.get('binarize'):
        if options.get('market') and options.get('from') and options.get(
                'to') and options.get('broker'):
            from tools.binarizer import do_binarizer
            do_binarizer(options)
        else:
            display_cli_help()

        sys.exit(0)

    #
    # fetcher mode
    #

    if options.get('fetch'):
        if options.get('market') and options.get('broker'):
            from tools.fetcher import do_fetcher
            do_fetcher(options)
        else:
            display_cli_help()

        sys.exit(0)

    #
    # optimizer mode
    #

    if options.get('optimize'):
        if options.get('market') and options.get('from') and options.get(
                'to') and options.get('broker'):
            from tools.optimizer import do_optimizer
            do_optimizer(options)
        else:
            display_cli_help()

        sys.exit(0)

    #
    # sync mode
    #

    if options.get('sync'):
        if options.get('market') and options.get('broker'):
            from tools.syncer import do_syncer
            do_syncer(options)
        else:
            display_cli_help()

        sys.exit(0)

    #
    # rebuilder mode
    #

    if options.get('rebuild'):
        if options.get('market') and options.get('from') and options.get(
                'to') and options.get('broker') and options.get('timeframe'):
            from tools.rebuilder import do_rebuilder
            do_rebuilder(options)
        else:
            display_cli_help()

        sys.exit(0)

    #
    # normal mode
    #

    Terminal.inst().info("Starting SIIS using %s identity..." % options['identity'])
    Terminal.inst().action("- (Press 'q' twice to terminate)")
    Terminal.inst().action("- (Press 'h' for help)")
    Terminal.inst().flush()

    if options.get('backtesting'):
        Terminal.inst().notice("Process a backtesting.")

    if options.get('paper-mode'):
        Terminal.inst().notice("- Using paper-mode trader.")
    else:
        Terminal.inst().notice("- Using live-mode trader.")

    signal.signal(signal.SIGINT, signal_handler)

    # monitoring service
    Terminal.inst().info("Starting monitor service...")
    monitor_service = MonitorService(options)

    # desktop notifier
    desktop_service = DesktopNotifier()
    # discord_service = DiscordNotifier()
    view_service = ViewService()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    # watcher service
    Terminal.inst().info("Starting watcher's service...")
    watcher_service = WatcherService(options)
    watcher_service.start(options)

    # trader service
    Terminal.inst().info("Starting trader's service...")
    trader_service = TraderService(watcher_service, monitor_service, options)
    trader_service.start(options)

    # want to display desktop notification and update views
    watcher_service.add_listener(desktop_service)
    watcher_service.add_listener(view_service)

    # want to display desktop notification and update views
    trader_service.add_listener(desktop_service)
    trader_service.add_listener(view_service)

    # trader service listen to watcher service and update views
    watcher_service.add_listener(trader_service)

    # strategy service
    Terminal.inst().info("Starting strategy's service...")
    strategy_service = StrategyService(watcher_service, trader_service, monitor_service, options)
    strategy_service.start(options)

    # strategy service listen to watcher service
    watcher_service.add_listener(strategy_service)

    # strategy service listen to trader service
    trader_service.add_listener(strategy_service)

    # want to display desktop notification, update view and notify on discord
    # strategy_service.add_listener(notifier_service)

    # @todo add notifier service and replace desktop service as desktop notifier
    # into this service same for discord...
    strategy_service.add_listener(desktop_service)
    strategy_service.add_listener(view_service)

    # for display stats (@todo move to views)
    desktop_service.strategy_service = strategy_service
    desktop_service.trader_service = trader_service

    # register terminal commands
    commands_handler = CommandsHandler()
    commands_handler.init(options)

    # cli commands registration
    register_general_commands(commands_handler)
    register_trading_commands(commands_handler, trader_service, strategy_service, monitor_service)
    register_region_commands(commands_handler, strategy_service)

    setup_views(siis_logger, view_service, watcher_service, trader_service, strategy_service)

    # setup and start the monitor service
    monitor_service.setup(watcher_service, trader_service, strategy_service)
    monitor_service.start()

    Terminal.inst().message("Running main loop...")

    Terminal.inst().upgrade()
    Terminal.inst().message("Steady...", view='notice')

    display_welcome()

    LOOP_SLEEP = 0.016  # in second
    MAX_CMD_ALIVE = 15  # in second

    running = True

    value = None
    value_changed = False
    command_timeout = 0
    prev_timestamp = 0

    try:
        while running:
            # keyboard input commands
            try:
                c = Terminal.inst().read()
                key = Terminal.inst().key()

                if c:
                    # split the command line
                    args = [
                        arg for arg in (value[1:].split(' ') if value and value.startswith(':') else [])
                        if arg
                    ]
                    if value and value[-1] == ' ':
                        args.append('')

                    # update the current type command
                    commands_handler.process_char(c, args)

                if key:
                    if key == 'KEY_ESCAPE':
                        # cancel command
                        value = None
                        value_changed = True
                        command_timeout = 0

                        # use command mode
                        Terminal.inst().set_mode(Terminal.MODE_DEFAULT)

                    # split the command line
                    args = [
                        arg for arg in (value[1:].split(' ') if value and value.startswith(':') else [])
                        if arg
                    ]
                    if value and value[-1] == ' ':
                        args.append('')

                    # process on the arguments
                    args = commands_handler.process_key(
                        key, args, Terminal.inst().mode == Terminal.MODE_COMMAND)

                    if args:
                        # regen the updated command line
                        value = ":" + ' '.join(args)
                        value_changed = True
                        command_timeout = 0

                    desktop_service.on_key_pressed(key)

                # @todo move the rest to command_handler
                if c:
                    if value and value[0] == ':':
                        if c == '\b':
                            # backspace, erase last command char
                            value = value[:-1] if value else None
                            value_changed = True
                            command_timeout = time.time()
                        elif c != '\n':
                            # append to the advanced command value
                            value += c
                            value_changed = True
                            command_timeout = time.time()
                        elif c == '\n':
                            result = commands_handler.process_cli(value)
                            command_timeout = 0

                            if not result:
                                # maybe an application level command
                                if value == ':q' or value == ':quit':
                                    running = False

                                elif value.startswith(':x '):
                                    # manually exit position at market @todo move as command
                                    target = value[2:]

                                    if target == "all" or target == "ALL":
                                        Terminal.inst().action(
                                            "Send close to market command for any positions",
                                            view='status')
                                        trader_service.command(
                                            Trader.COMMAND_CLOSE_ALL_MARKET, {})
                                    else:
                                        Terminal.inst().action(
                                            "Send close to market command for position %s" % (target, ),
                                            view='status')
                                        trader_service.command(
                                            Trader.COMMAND_CLOSE_MARKET, {'key': target})

                                elif value.startswith(':d '):
                                    # @deprecated manually duplicate a position entry or exit
                                    # must be associated to social strategy
                                    # @todo move as command
                                    target = value[2:]

                                    Terminal.inst().action(
                                        "Send replicate to market command for position %s" % (target, ),
                                        view='status')
                                    trader_service.command(
                                        Trader.COMMAND_TRIGGER, {'key': target})

                            # clear command value
                            value_changed = True
                            value = None

                            # use default mode
                            Terminal.inst().set_mode(Terminal.MODE_DEFAULT)

                    elif c != '\n':
                        # initial command value
                        value = "" + c
                        value_changed = True
                        command_timeout = time.time()

                    if value and value[0] == ':':
                        # use command mode
                        Terminal.inst().set_mode(Terminal.MODE_COMMAND)

                if value and value[0] != ':':
                    # direct key

                    # use default mode
                    Terminal.inst().set_mode(Terminal.MODE_DEFAULT)

                    try:
                        result = commands_handler.process_accelerator(key)

                        # @todo convert to Command object accelerator
                        if not result:
                            result = True

                            # @todo might be replaced by views
                            if value == 'p':
                                trader_service.command(Trader.COMMAND_LIST_POSITIONS, {})
                            elif value == 'o':
                                trader_service.command(Trader.COMMAND_LIST_ORDERS, {})
                            elif value == 'g':
                                trader_service.command(Trader.COMMAND_SHOW_PERFORMANCE, {})

                            # display views
                            elif value == 'C':
                                Terminal.inst().clear_content()
                            elif value == 'D':
                                Terminal.inst().switch_view('debug')
                            elif value == 'I':
                                Terminal.inst().switch_view('content')
                            elif value == 'F':
                                Terminal.inst().switch_view('strategy')
                            elif value == 'S':
                                Terminal.inst().switch_view('stats')
                            elif value == 'P':
                                Terminal.inst().switch_view('perf')
                            elif value == 'T':
                                Terminal.inst().switch_view('ticker')
                            elif value == 'A':
                                Terminal.inst().switch_view('account')
                            elif value == 'M':
                                Terminal.inst().switch_view('market')
                            elif value == 'Q':
                                Terminal.inst().switch_view('asset')
                            elif value == 'N':
                                Terminal.inst().switch_view('signal')

                            elif value == '?':
                                # ping services and workers
                                watcher_service.ping()
                                trader_service.ping()
                                strategy_service.ping()
                                monitor_service.ping()

                            elif value == ' ':
                                # a simple mark on the terminal
                                Terminal.inst().notice(
                                    "Trading time %s" % (datetime.fromtimestamp(
                                        strategy_service.timestamp).strftime('%Y-%m-%d %H:%M:%S')),
                                    view='status')

                            elif value == 'a':
                                desktop_service.audible = not desktop_service.audible
                                Terminal.inst().action(
                                    "Audible notification are now %s" %
                                    ("actives" if desktop_service.audible else "disabled", ),
                                    view='status')

                            elif value == 'n':
                                desktop_service.popups = not desktop_service.popups
                                Terminal.inst().action(
                                    "Desktop notification are now %s" %
                                    ("actives" if desktop_service.popups else "disabled", ),
                                    view='status')

                            elif value == 'e':
                                desktop_service.discord = not desktop_service.discord
                                Terminal.inst().action(
                                    "Discord notification are now %s" %
                                    ("actives" if desktop_service.discord else "disabled", ),
                                    view='status')

                            else:
                                result = False

                            if result:
                                value = None
                                value_changed = True
                                command_timeout = 0

                    except Exception as e:
                        has_exception(siis_logger, e)

            except IOError:
                pass
            except Exception as e:
                has_exception(siis_logger, e)

            # display advanced command only
            if value_changed:
                if value and value.startswith(':'):
                    Terminal.inst().action("Command: %s" % value[1:], view='command')
                else:
                    Terminal.inst().message("", view='command')

            # clear input if no char hit during the last MAX_CMD_ALIVE
            if value and not value.startswith(':'):
                if (command_timeout > 0) and (time.time() - command_timeout >= MAX_CMD_ALIVE):
                    value = None
                    value_changed = True
                    Terminal.inst().info("Current typing canceled", view='status')

            try:
                # display strategy trading time (update max once per second)
                if strategy_service.timestamp - prev_timestamp >= 1.0:
                    mode = "live"
                    if trader_service.backtesting:
                        mode = "backtesting"
                    elif trader_service.paper_mode:
                        mode = "paper-mode"

                    Terminal.inst().message(
                        "%s - %s" % (mode, datetime.fromtimestamp(
                            strategy_service.timestamp).strftime('%Y-%m-%d %H:%M:%S')),
                        view='notice')

                    prev_timestamp = strategy_service.timestamp

                # synchronous operations here
                watcher_service.sync()
                trader_service.sync()
                strategy_service.sync()
                monitor_service.sync()

                desktop_service.sync()
                view_service.sync()

                Terminal.inst().update()
            except BaseException as e:
                siis_logger.error(traceback.format_exc())
                Terminal.inst().error(repr(e))

            # don't waste CPU time on main thread
            time.sleep(LOOP_SLEEP)
    finally:
        Terminal.inst().restore_term()

    Terminal.inst().info("Terminate...")
    Terminal.inst().flush()

    commands_handler.terminate(options)
    commands_handler = None

    # service terminate
    monitor_service.terminate()
    strategy_service.terminate()
    trader_service.terminate()
    watcher_service.terminate()
    desktop_service.terminate()
    # discord_service.terminate()
    view_service.terminate()

    Terminal.inst().info("Saving database...")
    Terminal.inst().flush()

    Database.terminate()

    Terminal.inst().info("Bye!")
    Terminal.inst().flush()

    Terminal.terminate()
def fetch_candles(self, market_id, timeframe, from_date=None, to_date=None, n_last=None, fetch_option=""):
    """Yield OHLC candles fetched through the IG connector.

    Each yielded item is a list:
    [timestamp_ms, bid o/h/l/c, ask o/h/l/c, volume] (prices as strings, a
    missing side falls back to the other side). Empty candles (no bid nor
    ask high) are skipped. Connector errors are logged and yield nothing.
    """
    try:
        if n_last:
            data = self._connector.history_last_n(market_id, timeframe, n_last)
        else:
            data = self._connector.history_range(market_id, timeframe, from_date, to_date)
    except Exception as e:
        logger.error(repr(e))
        error_logger.error(traceback.format_exc())
        data = {}

    prices = data.get('prices', [])

    # get local timezone, assume its the same of the account, or overrided by account detail
    tzname = self._tzname or time.tzname[0]
    pst = pytz.timezone(tzname)

    for price in prices:
        dt = datetime.strptime(price['snapshotTimeUTC'], '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
        # ldt = datetime.strptime(price['snapshotTime'], '%Y/%m/%d %H:%M:%S')

        # timezone + DST aware conversion
        # print("<", dt, ldt)
        # dt = dt + pst.localize(ldt).dst() + pst.localize(ldt).utcoffset()

        # fix for D,W,M snapshotTimeUTC, probaby because of the DST (then might be +1 or -1 hour)
        if timeframe in (Instrument.TF_1D, Instrument.TF_1W, Instrument.TF_1M):
            if dt.hour == 23:
                # is 23:00 on the previous day, add 1h
                dt = dt + timedelta(hours=1)
            elif dt.hour == 1:
                # is 01:00 on the same day, sub 1h
                dt = dt - timedelta(hours=1)
        elif timeframe == Instrument.TF_4H:
            # same DST drift, realigned on the 0/4/8/... hour grid
            if dt.hour in (3, 7, 11, 15, 19, 23):
                dt = dt + timedelta(hours=1)
            elif dt.hour in (1, 5, 9, 13, 17, 21):
                dt = dt - timedelta(hours=1)

        # print(">", dt, ldt)
        timestamp = dt.timestamp()

        if price.get('highPrice')['bid'] is None and price.get('highPrice')['ask'] is None:
            # ignore empty candles
            continue

        # yield (timestamp, bid open/high/low/close, ask open/high/low/close, volume)
        yield([int(timestamp * 1000),
               str(price.get('openPrice')['bid'] or price.get('openPrice')['ask']),
               str(price.get('highPrice')['bid'] or price.get('highPrice')['ask']),
               str(price.get('lowPrice')['bid'] or price.get('lowPrice')['ask']),
               str(price.get('closePrice')['bid'] or price.get('closePrice')['ask']),
               str(price.get('openPrice')['ask'] or price.get('openPrice')['bid']),
               str(price.get('highPrice')['ask'] or price.get('highPrice')['bid']),
               str(price.get('lowPrice')['ask'] or price.get('lowPrice')['bid']),
               str(price.get('closePrice')['ask'] or price.get('closePrice')['bid']),
               price.get('lastTradedVolume', '0')])
def do_exporter(options):
    """Export OHLC/tick history from the database to SIIS text files.

    One file per market; the timeframe option selects a single OHLC
    timeframe, the tick stream, or (when absent) every timeframe of
    EXPORT_TF into a single "any" file. Exits the process when done.

    Fixes: the timeframe option is now parsed BEFORE the default to-date is
    computed (it previously always saw timeframe=None); the invalid
    timedelta(months=1) kwarg is replaced; the error path no longer
    NameErrors when the exception fired before the first open().
    """
    Terminal.inst().info("Starting SIIS exporter...")
    Terminal.inst().flush()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    broker_id = options['broker']
    market_id = options['market']

    # fix: parse the timeframe first, the default to-date below depends on it
    if not options.get('timeframe'):
        timeframe = None
    else:
        if options['timeframe'] in TIMEFRAME_FROM_STR_MAP:
            timeframe = TIMEFRAME_FROM_STR_MAP[options['timeframe']]
        else:
            try:
                timeframe = int(options['timeframe'])
            except ValueError:
                timeframe = None

    # UTC option dates
    from_date = options.get('from')
    to_date = options.get('to')

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            # fix: timedelta has no 'months' kwarg; 32 days always spans a month
            to_date = today + timedelta(days=32)
        else:
            to_date = today + timedelta(seconds=timeframe or 1.0)

        to_date = to_date.replace(microsecond=0)

    filename = options.get('filename')

    cur_datetime = datetime.now().astimezone(UTC()).strftime("%Y-%m-%dT%H:%M:%SZ")
    from_date_str = from_date.strftime("%Y-%m-%dT%H:%M:%SZ")
    to_date_str = to_date.strftime("%Y-%m-%dT%H:%M:%SZ")

    dst = None  # fix: bound before the try so the error path cannot NameError

    try:
        # exporting data...
        if timeframe is None:
            # every timeframe of EXPORT_TF into a single "any" file per market
            for market in options['market'].split(','):
                if market.startswith('!') or market.startswith('*'):
                    continue

                dst = open("%s-%s-%s-any.siis" % (filename, broker_id, market), "wt")

                # write file header
                dst.write(
                    "format=SIIS\tversion=%s\tcreated=%s\tbroker=%s\tmarket=%s\tfrom=%s\tto=%s\ttimeframe=any\n" %
                    (EXPORT_VERSION, cur_datetime, broker_id, market, from_date_str, to_date_str))

                for tf in EXPORT_TF:
                    Terminal.inst().info("Exporting %s OHLC %s..." % (market, timeframe_to_str(tf)))

                    dst.write("timeframe=%s\n" % timeframe_to_str(tf))
                    export_ohlcs_siis_1_0_0(options['broker'], market, tf, from_date, to_date, dst)

                dst.close()
                dst = None

        elif timeframe == Instrument.TF_TICK:
            for market in options['market'].split(','):
                if market.startswith('!') or market.startswith('*'):
                    continue

                dst = open("%s-%s-%s-t.siis" % (filename, broker_id, market), "wt")

                # write file header
                dst.write(
                    "format=SIIS\tversion=%s\tcreated=%s\tbroker=%s\tmarket=%s\tfrom=%s\tto=%s\ttimeframe=t\n" %
                    (EXPORT_VERSION, cur_datetime, broker_id, market, from_date_str, to_date_str))

                Terminal.inst().info("Exporting %s ticks/trades..." % (market, ))

                dst.write("timeframe=t\n")
                export_ticks_siis_1_0_0(options['broker'], market, from_date, to_date, dst)

                dst.close()
                dst = None

        elif timeframe > 0:
            # particular ohlc
            for market in options['market'].split(','):
                if market.startswith('!') or market.startswith('*'):
                    continue

                dst = open(
                    "%s-%s-%s-%s.siis" % (filename, broker_id, market, timeframe_to_str(timeframe)),
                    "wt")

                # write file header
                dst.write(
                    "format=SIIS\tversion=%s\tcreated=%s\tbroker=%s\tmarket=%s\tfrom=%s\tto=%s\ttimeframe=%s\n" %
                    (EXPORT_VERSION, cur_datetime, broker_id, market, from_date_str, to_date_str,
                     timeframe_to_str(timeframe)))

                Terminal.inst().info("Exporting %s OHLC %s..." % (market, timeframe_to_str(timeframe)))

                dst.write("timeframe=%s\n" % timeframe_to_str(timeframe))
                export_ohlcs_siis_1_0_0(options['broker'], market, timeframe, from_date, to_date, dst)

                dst.close()
                dst = None
    except KeyboardInterrupt:
        pass
    except Exception as e:
        error_logger.error(str(e))

        # fix: dst may be None (never opened, or already closed) at this point
        if dst:
            dst.close()
            dst = None
    finally:
        pass

    Database.terminate()

    Terminal.inst().info("Exportation done!")
    Terminal.inst().flush()

    Terminal.terminate()

    sys.exit(0)
def get_historical_candles(self, symbol, interval, from_date, to_date=None, limit=None):
    """Yield OHLC candles from Kraken as (timestamp_ms, o, h, l, c, volume).

    Time interval [1m,5m,1h,4h,1d,1w,15d]. Kraken keeps little history, so a
    single request is performed (no pagination loop).

    Fixes: to_date is optional per its default but the original always
    dereferenced it (crash on None); 'dt' could be unbound in the final
    bound check when the result set was empty.
    """
    if interval not in self.INTERVALS:
        raise ValueError("Kraken does not support interval %s !" % interval)

    params = {
        'pair': symbol,
        'interval': interval,
    }

    # minus 1 sec else will not have from current
    last_datetime = from_date.timestamp() - 1.0
    # fix: to_date may be None; 0 disables the upper bound below
    to_ts = to_date.timestamp() if to_date else 0

    delta = None

    # but we disallow 1w and 15d because 1w starts on a Tuesday
    if interval == 10080:
        delta = timedelta(days=3)

    while 1:
        if last_datetime:
            params['since'] = int(last_datetime)

        results = self.query_public('OHLC', params)

        if results.get('error', []):
            raise ValueError("Kraken historical candle : %s !" % '\n'.join(results['error']))

        candles = results.get('result', {}).get(symbol, [])

        # fix: keep dt defined even when candles is empty (final bound check)
        dt = last_datetime

        for c in candles:
            if delta:
                # realign the weekly candle start
                # NOTE(review): fromtimestamp() is local time before the tzinfo
                # replace — confirm the host runs in UTC
                dt = (datetime.fromtimestamp(c[0]).replace(tzinfo=UTC()) - delta).timestamp()
            else:
                dt = c[0]

            if to_ts and dt > to_ts:
                break

            yield (
                int(dt * 1000),  # integer ms
                c[1], c[2], c[3], c[4],  # ohlc
                c[6])  # volume

            last_datetime = dt

        if (to_ts and dt > to_ts):
            break

        # kraken does not manage lot of history (no need to loop)
        break
def fetch_and_generate(self, market_id, timeframe, from_date=None, to_date=None,
                       n_last=1000, fetch_option="", cascaded=None):
    """
    Fetch ticks/trades (timeframe == 0) or OHLCs for a market, store them into
    the database, and optionally cascade-generate the higher timeframes.

    @param market_id Market identifier to fetch.
    @param timeframe 0 for ticks else one of self.GENERATED_TF (seconds).
    @param from_date Optional datetime, else computed back from n_last bars.
    @param to_date Optional datetime, else now plus one timeframe.
    @param n_last Number of last bars used to compute from_date when missing.
    @param fetch_option Extra fetch option (unused here, kept for interface).
    @param cascaded Max timeframe to cascade-generate from the fetched data.
    """
    if timeframe > 0 and timeframe not in self.GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe, ))
        return

    generators = []
    from_tf = timeframe

    self._last_ticks = []
    self._last_ohlcs = {}

    if not from_date and n_last:
        # compute a from date
        today = datetime.now().astimezone(UTC())

        if timeframe >= Instrument.TF_MONTH:
            # fix: timedelta() has no 'months' parameter (raised TypeError) ;
            # approximate one month as 30 days, consistent with TF_MONTH,
            # then snap to the first day of the month at 00:00
            from_date = (today - timedelta(
                days=30 * int(timeframe / Instrument.TF_MONTH) * n_last)).replace(
                day=1, hour=0, minute=0, second=0)
        elif timeframe >= Instrument.TF_1D:
            from_date = (today - timedelta(
                days=int(timeframe / Instrument.TF_1D) * n_last)).replace(
                hour=0, minute=0, second=0)
        elif timeframe >= Instrument.TF_1H:
            from_date = (today - timedelta(
                hours=int(timeframe / Instrument.TF_1H) * n_last)).replace(
                minute=0, second=0)
        elif timeframe >= Instrument.TF_1M:
            from_date = (today - timedelta(
                minutes=int(timeframe / Instrument.TF_1M) * n_last)).replace(second=0)
        elif timeframe >= Instrument.TF_1S:
            from_date = today - timedelta(seconds=int(timeframe / Instrument.TF_1S) * n_last)

        from_date = from_date.replace(microsecond=0)

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            # fix: timedelta(months=1) is invalid ; one month approximated as 30 days
            to_date = today + timedelta(days=30)
        else:
            to_date = today + timedelta(seconds=timeframe)

        to_date = to_date.replace(microsecond=0)

    # cascaded generation of candles
    if cascaded:
        for tf in Fetcher.GENERATED_TF:
            if tf > timeframe:
                # from timeframe greater than initial
                if tf <= cascaded:
                    # until max cascaded timeframe
                    generators.append(CandleGenerator(from_tf, tf))
                    from_tf = tf

                    # store for generation
                    self._last_ohlcs[tf] = []
            else:
                from_tf = tf

    if timeframe > 0:
        self._last_ohlcs[timeframe] = []

    n = 0  # counter reset at each progress report
    t = 0  # total number of fetched rows

    if timeframe == 0:
        for data in self.fetch_trades(market_id, from_date, to_date, None):
            # store (int timestamp in ms, str bid, str ofr, str volume)
            Database.inst().store_market_trade(
                (self.name, market_id, data[0], data[1], data[2], data[3]))

            if generators:
                self._last_ticks.append(
                    (float(data[0]) * 0.001, float(data[1]), float(data[2]), float(data[3])))

            # generate higher candles
            for generator in generators:
                if generator.from_tf == 0:
                    candles = generator.generate_from_ticks(self._last_ticks)

                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf] += candles

                    # remove consumed ticks
                    self._last_ticks = []
                else:
                    candles = generator.generate_from_candles(
                        self._last_ohlcs[generator.from_tf])

                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf] += candles

                    # remove consumed candles
                    self._last_ohlcs[generator.from_tf] = []

            n += 1
            t += 1

            if n == 10000:
                n = 0
                Terminal.inst().info("%i trades for %s..." % (t, market_id))

            # calm down the storage of tick, if parsing is faster
            while Database.inst().num_pending_ticks_storage() > Fetcher.MAX_PENDING_TICK:
                time.sleep(Fetcher.TICK_STORAGE_DELAY)  # wait a little before continue

        logger.info("Fetched %i trades for %s" % (t, market_id))

    elif timeframe > 0:
        for data in self.fetch_candles(market_id, timeframe, from_date, to_date, None):
            # store (int timestamp ms, str open bid, high bid, low bid, close bid,
            # open ofr, high ofr, low ofr, close ofr, volume)
            Database.inst().store_market_ohlc(
                (self.name, market_id, data[0], int(timeframe),
                 data[1], data[2], data[3], data[4],
                 data[5], data[6], data[7], data[8],
                 data[9]))

            if generators:
                candle = Candle(float(data[0]) * 0.001, timeframe)

                candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]), float(data[4]))
                candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]), float(data[8]))

                candle.set_volume(float(data[9]))
                candle.set_consolidated(True)

                self._last_ohlcs[timeframe].append(candle)

            # generate higher candles
            for generator in generators:
                candles = generator.generate_from_candles(
                    self._last_ohlcs[generator.from_tf])

                if candles:
                    for c in candles:
                        self.store_candle(market_id, generator.to_tf, c)

                    self._last_ohlcs[generator.to_tf].extend(candles)

                # remove consumed candles
                self._last_ohlcs[generator.from_tf] = []

            n += 1
            t += 1

            if n == 1000:
                n = 0
                Terminal.inst().info(
                    "%i candles for %s in %s..." % (t, market_id, timeframe_to_str(timeframe)))

        logger.info("Fetched %i candles for %s in %s" % (t, market_id, timeframe_to_str(timeframe)))
def _ws_message(self, message, data):
    """
    Dispatch a bitmex websocket message.

    @param message Message kind, only 'action' is handled.
    @param data Tuple (action, table, keys, rows) : data[0] is the action
        ('insert', 'update'...), data[1] the table name, data[2] the keys and
        data[3] the row dicts.
    """
    if message == 'action':
        #
        # account data update
        #
        if data[1] in ('margin', 'instrument', 'quote'):
            funds = self.connector.ws.funds()
            ratio = 1.0
            currency = funds['currency']

            # convert XBt to BTC
            if currency == 'XBt':
                ratio = 1.0 / 100000000.0
                currency = 'XBT'

            # walletBalance or amount, riskLimit is max leverage
            account_data = (
                funds['walletBalance'] * ratio, funds['marginBalance'] * ratio,
                funds['unrealisedPnl'] * ratio, currency, funds['riskLimit'] * ratio)

            self.service.notify(Signal.SIGNAL_ACCOUNT_DATA, self.name, account_data)

        elif data[1] == 'liquidation' and data[0] == 'insert':  # action
            exec_logger.info("bitmex l226 liquidation > %s " % str(data))

            for ld in data[3]:
                # data[3]['orderID']
                # symbol is market-id, side is the liquidation order direction,
                # then the initial liquided order was in the opposite
                liquidation_data = (
                    ld['symbol'],
                    time.time(),
                    1 if ld['side'] == "Buy" else -1,
                    ld['price'],
                    ld['leavesQty'])

                self.service.notify(Signal.SIGNAL_LIQUIDATION_DATA, self.name, liquidation_data)

                Database.inst().store_market_liquidation((
                    self.name, liquidation_data[0],
                    int(liquidation_data[1] * 1000.0),
                    liquidation_data[2], liquidation_data[3], liquidation_data[4]))

        #
        # orders partial execution
        #
        if data[1] == 'execution' and data[2]:
            for ld in data[3]:
                exec_logger.info("bitmex l185 execution > %s" % str(ld))

        #
        # positions
        #
        elif data[1] == 'position':  # action
            for ld in data[3]:
                ref_order_id = ""

                symbol = ld['symbol']
                position_id = symbol

                # 'leverage': 10, 'crossMargin': False
                if ld.get('currentQty') is None:
                    # no position
                    continue

                if ld.get('currentQty', 0) != 0:
                    direction = Order.SHORT if ld['currentQty'] < 0 else Order.LONG
                elif ld.get('openOrderBuyQty', 0) > 0:
                    direction = Order.LONG
                elif ld.get('openOrderSellQty', 0) > 0:
                    direction = Order.SHORT
                else:
                    direction = 0

                operation_time = self._parse_datetime(
                    ld.get('timestamp')).replace(tzinfo=UTC()).timestamp()
                quantity = abs(float(ld['currentQty']))

                # 'execQty': ?? 'execBuyQty', 'execSellQty': ??
                # 'commission': 0.00075 'execComm': 0 ?? 'currentComm': 0
                position_data = {
                    'id': symbol,
                    'symbol': symbol,
                    'direction': direction,
                    'timestamp': operation_time,
                    'quantity': quantity,
                    'avg-entry-price': ld.get('avgEntryPrice', None),
                    'exec-price': None,
                    'stop-loss': None,
                    'take-profit': None,
                    'cumulative-filled': quantity,
                    'filled': None,  # no have
                    'liquidation-price': ld.get('liquidationPrice'),
                    'commission': ld.get('commission', 0.0),
                    'profit-currency': ld.get('currency'),
                    'profit-loss': ld.get('unrealisedPnl'),
                    'profit-loss-rate': ld.get('unrealisedPnlPcnt')
                }

                # fix: 'openOrderSellQty' was tested twice, the buy side was
                # never seen, so buy-only pending positions were not opened
                if (ld.get('openOrderBuyQty', 0) or ld.get(
                        'openOrderSellQty', 0)) and quantity == 0.0:
                    # not current quantity, but open order qty
                    self.service.notify(
                        Signal.SIGNAL_POSITION_OPENED, self.name,
                        (symbol, position_data, ref_order_id))
                elif quantity > 0:
                    # current qty updated
                    self.service.notify(
                        Signal.SIGNAL_POSITION_UPDATED, self.name,
                        (symbol, position_data, ref_order_id))
                else:
                    # empty quantity no open order qty, position deleted
                    self.service.notify(
                        Signal.SIGNAL_POSITION_DELETED, self.name,
                        (symbol, position_data, ref_order_id))

        #
        # orders
        #
        elif data[1] == 'order':
            for ld in data[3]:
                exec_logger.info("bitmex.com order %s" % str(ld))

                symbol = ld.get('symbol')
                status = ld.get('ordStatus', None)

                if not status:  # updated
                    operation_time = self._parse_datetime(
                        ld.get('timestamp')).replace(tzinfo=UTC()).timestamp()

                    # quantity or price modified
                    if (ld.get('orderQty') or ld.get('price') or ld.get('stopPx')
                            ) and ld.get('workingIndicator'):
                        order = {
                            'id': ld['orderID'],
                            'symbol': symbol,
                            'timestamp': operation_time,
                            'quantity': ld.get('orderQty', None),
                            'price': ld.get('price'),
                            'stop-price': ld.get('stopPx'),
                            'stop-loss': None,
                            'take-profit': None
                        }

                        self.service.notify(Signal.SIGNAL_ORDER_UPDATED,
                                            self.name, (symbol, order, ""))

                elif status == 'New':  # action='insert'
                    transact_time = self._parse_datetime(
                        ld.get('transactTime')).replace(tzinfo=UTC()).timestamp()

                    if ld['ordType'] == 'Market':
                        order_type = Order.ORDER_MARKET
                    elif ld['ordType'] == 'Limit':
                        order_type = Order.ORDER_LIMIT
                    elif ld['ordType'] == 'Stop':
                        order_type = Order.ORDER_STOP
                    elif ld['ordType'] == 'StopLimit':
                        order_type = Order.ORDER_STOP_LIMIT
                    elif ld['ordType'] == 'MarketIfTouched':
                        order_type = Order.ORDER_TAKE_PROFIT
                    elif ld['ordType'] == 'LimitIfTouched':
                        order_type = Order.ORDER_TAKE_PROFIT_LIMIT
                    else:
                        order_type = Order.ORDER_MARKET

                    if ld['timeInForce'] == 'GoodTillCancel':
                        time_in_force = Order.TIME_IN_FORCE_GTC
                    elif ld['timeInForce'] == 'ImmediateOrCancel':
                        time_in_force = Order.TIME_IN_FORCE_IOC
                    elif ld['timeInForce'] == 'FillOrKill':
                        time_in_force = Order.TIME_IN_FORCE_FOK
                    else:
                        time_in_force = Order.TIME_IN_FORCE_GTC

                    # execution options
                    exec_inst = ld.get('execInst', '').split(',')

                    # execution price
                    # fix: 'IndexPrice' and 'MarkPrice' were cross-mapped ;
                    # create_order emits 'IndexPrice' for Order.PRICE_INDEX
                    # and 'MarkPrice' for Order.PRICE_MARK
                    if 'LastPrice' in exec_inst:
                        price_type = Order.PRICE_LAST
                    elif 'IndexPrice' in exec_inst:
                        price_type = Order.PRICE_INDEX
                    elif 'MarkPrice' in exec_inst:
                        price_type = Order.PRICE_MARK
                    else:
                        price_type = Order.PRICE_LAST

                    order = {
                        'id': ld['orderID'],
                        'symbol': symbol,
                        'direction': Order.LONG if ld['side'] == 'Buy' else Order.SHORT,
                        'type': order_type,
                        'timestamp': transact_time,
                        'quantity': ld.get('orderQty', 0),
                        'price': ld.get('price'),
                        'stop-price': ld.get('stopPx'),
                        'time-in-force': time_in_force,
                        'post-only': 'ParticipateDoNotInitiate' in exec_inst,  # maker only (not taker)
                        'close-only': 'Close' in exec_inst,
                        'reduce-only': 'ReduceOnly' in exec_inst,
                        'price-type': price_type,
                        'stop-loss': None,
                        'take-profit': None
                    }

                    self.service.notify(
                        Signal.SIGNAL_ORDER_OPENED, self.name,
                        (symbol, order, ld.get('clOrdID', "")))

                elif status == 'Canceled':  # action='update'
                    self.service.notify(
                        Signal.SIGNAL_ORDER_CANCELED, self.name,
                        (symbol, ld['orderID'], ld.get('clOrdID', "")))

                elif status == 'Rejected':  # action='update'
                    reason = ""

                    if ld.get('ordRejReason') == 'INSUFFICIENT_BALANCE':
                        reason = 'insufficient balance'

                    # NOTE(review): reason is computed but not forwarded in the
                    # signal payload — confirm the expected tuple shape of
                    # SIGNAL_ORDER_REJECTED before changing it
                    self.service.notify(Signal.SIGNAL_ORDER_REJECTED, self.name,
                                        (symbol, ld.get('clOrdID', "")))

                elif status == 'Filled':  # action='update'
                    # fix: the fallback string missed the 'T' date/time
                    # separator and could never be parsed by the format below
                    operation_time = datetime.strptime(
                        ld.get('timestamp', '1970-01-01T00:00:00.000Z'),
                        "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=UTC()).timestamp()

                    # 'workingIndicator': False, if fully filled
                    # 'leavesQty': 0, if fully filled
                    # 'currency': 'XBT', 'settlCurrency': 'XBt', 'triggered': '',
                    # 'simpleLeavesQty': None, 'leavesQty': 10000, 'simpleCumQty': None,
                    # 'cumQty': 0, 'avgPx': None, ...
                    order = {
                        'id': ld['orderID'],
                        'symbol': symbol,
                        # 'direction': direction,  # no have
                        'timestamp': operation_time,
                        'quantity': ld.get('orderQty', 0.0),
                        'filled': None,  # no have
                        'cumulative-filled': ld.get('cumQty', 0),
                        'exec-price': None,  # no have
                        'avg-price': ld.get('avgPx', 0),  # averaged for the cumulative
                        # 'maker': ,  # true if maker, false if taker
                    }

                    self.service.notify(
                        Signal.SIGNAL_ORDER_TRADED, self.name,
                        (symbol, order, ld.get('clOrdID', "")))

        #
        # market
        #
        # fix: previous condition "... and data[0] == 'insert' or data[0] == 'update'"
        # matched ANY table on an 'update' action because of operator precedence
        elif data[1] in ('instrument', 'quote') and data[0] in ('insert', 'update'):
            # instrument and quote data (bid, ofr, volume)
            for market_id in data[2]:
                instrument = self.connector.ws.get_instrument(market_id)

                if market_id not in self._watched_instruments:
                    # not a symbol of interest
                    continue

                #
                # notify a market data update
                #

                tradeable = instrument.get('state', 'Closed') == 'Open'
                # fix: fallback string missed the 'T' separator of the format
                update_time = datetime.strptime(
                    instrument.get('timestamp', '1970-01-01T00:00:00.000Z'),
                    "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=UTC()).timestamp()
                symbol = instrument.get('symbol', '')
                base_symbol = instrument.get('rootSymbol', 'USD')
                quote_symbol = symbol[-3:]

                # base to XBT
                base_exchange_rate = 1.0

                # base instrument
                base_market_id = "XBT" + quote_symbol
                base_market = None

                if quote_symbol == "USD" and base_market_id != symbol:
                    xbtusd_market = self.connector.ws.get_instrument("XBTUSD")

                    if xbtusd_market:
                        base_exchange_rate = instrument.get(
                            'lastPrice', 1.0) / xbtusd_market.get('lastPrice', 1.0)
                elif base_market_id != symbol:
                    base_market = self.connector.ws.get_instrument(base_market_id)
                    xbtusd_market = self.connector.ws.get_instrument("XBTUSD")

                    if base_market and xbtusd_market:
                        base_exchange_rate = instrument.get('lastPrice', 1.0) / (
                            base_market.get('lastPrice', 1.0) /
                            xbtusd_market.get('lastPrice', 1.0))

                bid = instrument.get('bidPrice')
                ofr = instrument.get('askPrice')

                if bid is not None and ofr is not None:
                    # update contract size (value per pip is derived below,
                    # every branch used the same lastPrice product)
                    if quote_symbol == 'USD' and base_market_id == symbol:
                        # XBTUSD...
                        contract_size = 1.0 / instrument.get('lastPrice', 1.0)
                    elif base_market_id == symbol:
                        # XBTU19...
                        contract_size = 1.0 / instrument.get('lastPrice', 1.0)
                    elif quote_symbol == 'USD' and base_market_id != symbol:
                        # ETHUSD...
                        contract_size = (0.001 * 0.0001) * instrument.get('lastPrice', 1.0)
                    elif base_market and base_market_id != symbol:
                        # ADAZ18...
                        contract_size = 0.0001 * instrument.get('lastPrice', 1.0)
                    else:
                        contract_size = 0.0001 * instrument.get('lastPrice', 1.0)

                    value_per_pip = contract_size * instrument.get('lastPrice', 1.0)

                    vol24h = instrument.get('volume24h')
                    vol24hquote = instrument.get('foreignNotional24h')

                    market_data = (
                        market_id, tradeable, update_time, bid, ofr,
                        base_exchange_rate, contract_size, value_per_pip,
                        vol24h, vol24hquote)

                    self.service.notify(Signal.SIGNAL_MARKET_DATA, self.name, market_data)

                #
                # notify a tick data update
                #

                volume = instrument.get('volume', 0)  # ex: 32057250

                last_bid = None
                last_ofr = None
                last_vol = None

                if 'bidPrice' in data[3][0] and data[3][0]['bidPrice']:
                    # price update
                    last_bid = float(data[3][0]['bidPrice'])

                if 'askPrice' in data[3][0] and data[3][0]['askPrice']:
                    # price update
                    last_ofr = float(data[3][0]['askPrice'])

                if 'volume' in data[3][0] and data[3][0]['volume']:
                    last_vol = float(data[3][0]['volume'])

                if bid is not None and ofr is not None and volume is not None and last_vol:
                    # we have a tick when we have a volume in data content
                    tick = (update_time, bid, ofr, volume)

                    # and notify
                    self.service.notify(Signal.SIGNAL_TICK_DATA, self.name,
                                        (market_id, tick))

                    if not self._read_only and self._store_trade:
                        # store trade/tick
                        Database.inst().store_market_trade(
                            (self.name, symbol, int(update_time * 1000), bid, ofr, volume))

                    for tf in Watcher.STORED_TIMEFRAMES:
                        # generate candle per each timeframe
                        with self._mutex:
                            candle = self.update_ohlc(
                                market_id, tf, update_time, last_bid, last_ofr, last_vol)

                        if candle is not None:
                            self.service.notify(Signal.SIGNAL_CANDLE_DATA,
                                                self.name, (market_id, candle))

        #
        # order book L2 top 25
        #
        elif data[1] == 'orderBookL2_25' and data[2]:
            pass
def fetch_market(self, market_id):
    """
    Fetch and cache it. It rarely changes, except for base exchange rate,
    so assume it once for all.

    @param market_id BitMex symbol (ex: XBTUSD).
    @return A filled Market instance, or None when the instrument is unknown.
    @todo min/max/step/min_notional
    """
    instrument = self.connector.ws.get_instrument(market_id)

    # fix: market was unbound (NameError on return) when instrument is falsy
    market = None

    if instrument:
        # tickSize is the minimum price increment (0.5USD for XBTUSD)
        tradeable = instrument.get('state', 'Closed') == 'Open'
        update_time = self._parse_datetime(
            instrument.get('timestamp')).replace(tzinfo=UTC()).timestamp()
        symbol = instrument.get('symbol', '')
        base_symbol = instrument.get('rootSymbol', '')
        quote_symbol = symbol[-3:]

        bid = instrument.get('bidPrice')
        ofr = instrument.get('askPrice')

        market = Market(market_id, symbol)

        # compute base precision from the tick size, example 0.05 => 2
        base_precision = -math.floor(math.log10(instrument.get('tickSize', 1.0)))

        market.set_base(base_symbol, base_symbol, base_precision)
        market.set_quote(quote_symbol, quote_symbol)

        # base to XBT
        market.base_exchange_rate = 1.0

        # base instrument
        base_market_id = "XBT" + quote_symbol
        base_market = self.connector.ws.get_instrument(base_market_id)
        xbtusd_market = self.connector.ws.get_instrument("XBTUSD")

        if quote_symbol == "USD" and base_market_id != symbol and xbtusd_market:
            market.base_exchange_rate = instrument.get(
                'lastPrice', 1.0) / xbtusd_market.get('lastPrice', 1.0)
        elif base_market_id != symbol and base_market and xbtusd_market:
            market.base_exchange_rate = instrument.get('lastPrice', 1.0) / (
                base_market.get('lastPrice', 1.0) / xbtusd_market.get('lastPrice', 1.0))

        # @todo 'multiplier', 'riskStep', 'riskLimit'

        # limits
        min_notional = 1.0  # $

        if quote_symbol != "USD" and base_market_id != "XBT":
            # any contract on futur XBT quote
            min_notional = 0.0001

        # BCHXBT 'maxOrderQty': 100000000, 'maxPrice': 10, 'lotSize': 1, 'tickSize': 0.0001,
        # XBCUSD 'maxOrderQty': 10000000, 'maxPrice': 1000000, 'lotSize': 1, 'tickSize': 0.5,
        # NOTE(review): min size is set from tickSize — confirm lotSize is not
        # the intended minimum order size
        market.set_size_limits(instrument.get('tickSize', 1.0),
                               instrument.get('maxOrderQty', 0.0),
                               instrument.get('tickSize', 1.0))
        market.set_notional_limits(min_notional, instrument.get('maxPrice', 0.0), 0.0)
        market.set_price_limits(0.0, 0.0, instrument.get('tickSize', 1.0))

        # need to divided by account currency XBt = 100000000
        market.margin_factor = instrument.get('initMargin', 1.0)

        # '-' if perpetual else match the regexp and keep the expiry part only
        expiry = BitMexWatcher.EXPIRY_RE.match(market_id)

        # ex: instrument expiry '2018-12-28T12:00:00.000Z' for Z18 :
        # 28 of month Z (december) and year 2018
        if expiry is None:
            market.expiry = '-'
        else:
            market.expiry = expiry.group(2) + expiry.group(3)

        market.market_type = Market.TYPE_CRYPTO
        market.unit_type = Market.UNIT_CONTRACTS
        market.contract_type = Market.CONTRACT_CFD if not expiry else Market.CONTRACT_FUTUR

        market.trade = Market.TRADE_MARGIN | Market.TRADE_IND_MARGIN

        if bid is not None and ofr is not None:
            market.bid = bid
            market.ofr = ofr
            market.last_update_time = update_time

        market.lot_size = instrument.get('lotSize', 1.0)  # ex: 1.0 for XBTUSD
        market.contract_size = 1.0
        market.value_per_pip = 1.0
        market.one_pip_means = instrument.get('tickSize', 1.0)

        # contract_size need to be updated as price changes
        if quote_symbol == 'USD' and base_market_id == symbol:
            # XBTUSD...
            market.contract_size = 1.0 / instrument.get('lastPrice', 1.0)
            market.value_per_pip = market.contract_size * instrument.get('lastPrice', 1.0)
        elif base_market_id == symbol:
            # XBTU19...
            market.contract_size = 1.0 / instrument.get('lastPrice', 1.0)
            market.value_per_pip = market.contract_size * instrument.get('lastPrice', 1.0)
        elif quote_symbol == 'USD' and base_market_id != symbol:
            # ETHUSD...
            market.contract_size = (0.001 * 0.0001) * instrument.get('lastPrice', 1.0)
            market.value_per_pip = market.contract_size * instrument.get('lastPrice', 1.0)
        elif base_market and base_market_id != symbol:
            # ADAZ18...
            market.contract_size = 0.0001 * instrument.get('lastPrice', 1.0)
            market.value_per_pip = market.contract_size * instrument.get('lastPrice', 1.0)

        market.maker_fee = instrument.get('makerFee', 0.0)
        market.taker_fee = instrument.get('takerFee', 0.0)

        # store the last market info to be used for backtesting
        if not self._read_only:
            Database.inst().store_market_info((
                self.name, market_id, market.symbol,
                market.market_type, market.unit_type, market.contract_type,  # type
                market.trade, market.orders,  # type
                market.base, market.base_display, market.base_precision,  # base
                market.quote, market.quote_display, market.quote_precision,  # quote
                market.expiry, int(market.last_update_time * 1000.0),  # expiry, timestamp
                str(market.lot_size), str(market.contract_size), str(market.base_exchange_rate),
                str(market.value_per_pip), str(market.one_pip_means), str(market.margin_factor),
                str(market.min_size), str(market.max_size), str(market.step_size),  # size limits
                str(market.min_notional), str(market.max_notional), str(market.step_notional),  # notional limits
                str(market.min_price), str(market.max_price), str(market.tick_price),  # price limits
                str(market.maker_fee), str(market.taker_fee),
                str(market.maker_commission), str(market.taker_commission))  # fees
            )

        # notify for strategy
        self.service.notify(Signal.SIGNAL_MARKET_INFO_DATA, self.name,
                            (market_id, market))

    return market
def create_order(self, order):
    """
    Create an order on bitmex from an Order model.

    @param order Order model instance (symbol, type, direction, quantity,
        price/stop_price and execution options are read from it).
    @return True when the order is accepted by the exchange else False.
    """
    if not self.has_market(order.symbol):
        logger.error("%s does not support market %s in order %s !" % (
            self.name, order.symbol, order.order_id))
        # fix: was a bare return (None) while every other error path returns False
        return False

    if not self._activity:
        return False

    postdict = {
        'symbol': order.symbol,
        'clOrdID': order.ref_order_id,
    }

    qty = order.quantity

    # short means negative quantity
    if order.direction == Position.SHORT:
        qty = -qty

    exec_inst = []

    # order type
    if order.order_type == Order.ORDER_MARKET:
        postdict['ordType'] = 'Market'
        postdict['orderQty'] = qty
    elif order.order_type == Order.ORDER_LIMIT:
        postdict['ordType'] = 'Limit'
        postdict['orderQty'] = qty
        postdict['price'] = order.price

        # only possible with limit order
        if order.post_only:
            exec_inst.append("ParticipateDoNotInitiate")
    elif order.order_type == Order.ORDER_STOP:
        postdict['ordType'] = 'Stop'
        postdict['orderQty'] = qty
        postdict['stopPx'] = order.stop_price
    elif order.order_type == Order.ORDER_STOP_LIMIT:
        postdict['ordType'] = 'StopLimit'
        postdict['orderQty'] = qty
        postdict['price'] = order.price
        postdict['stopPx'] = order.stop_price
    elif order.order_type == Order.ORDER_TAKE_PROFIT:
        postdict['ordType'] = 'MarketIfTouched'
        postdict['orderQty'] = qty
        postdict['stopPx'] = order.stop_price
    elif order.order_type == Order.ORDER_TAKE_PROFIT_LIMIT:
        postdict['ordType'] = 'LimitIfTouched'
        postdict['orderQty'] = qty
        postdict['price'] = order.price
        postdict['stopPx'] = order.stop_price
    else:
        # defaults to a market order
        postdict['ordType'] = 'Market'
        postdict['orderQty'] = qty

    # execution price for stop orders
    if order.order_type in (Order.ORDER_STOP, Order.ORDER_STOP_LIMIT,
                            Order.ORDER_TAKE_PROFIT, Order.ORDER_TAKE_PROFIT_LIMIT):
        if order.price_type == Order.PRICE_LAST:
            exec_inst.append('LastPrice')
        elif order.price_type == Order.PRICE_INDEX:
            exec_inst.append('IndexPrice')
        elif order.price_type == Order.PRICE_MARK:
            exec_inst.append('MarkPrice')

    if order.reduce_only:
        exec_inst.append("ReduceOnly")
        # exec_inst.append("Close")  # distinct for reduce only but close imply
        # reduceOnly ; close implies a qty or a side

    if exec_inst:
        postdict['execInst'] = ','.join(exec_inst)

    logger.info("Trader %s order %s %s @%s %s" % (
        self.name, order.direction_to_str(), order.symbol, order.price, order.quantity))

    try:
        result = self._watcher.connector.request(
            path="order", postdict=postdict, verb='POST', max_retries=15)
    except Exception as e:
        logger.error(str(e))
        return False

    if result and result.get('ordRejReason'):
        logger.error("%s rejected order %s from %s %s - cause : %s !" % (
            self.name, order.direction_to_str(), order.quantity,
            order.symbol, result['ordRejReason']))
        return False

    # store the order with its order id
    order.set_order_id(result['orderID'])

    # fix: .timestamp() was chained twice — float has no timestamp attribute,
    # so every successful order raised AttributeError here
    order.created_time = self._parse_datetime(
        result.get('timestamp')).replace(tzinfo=UTC()).timestamp()
    order.transact_time = self._parse_datetime(
        result.get('transactTime')).replace(tzinfo=UTC()).timestamp()

    self._orders[order.order_id] = order

    return True
def do_rebuilder(options):
    """
    Rebuild higher timeframe OHLCs from stored ticks/trades or from a lower
    timeframe OHLC, for each requested market.

    options keys used : identity, broker, market (comma separated),
    timeframe, cascaded, from, to.
    Exits the process when done.
    """
    Terminal.inst().info("Starting SIIS rebuilder using %s identity..." % options['identity'])
    Terminal.inst().flush()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    timeframe = -1
    cascaded = None

    if not options.get('timeframe'):
        timeframe = 60  # default to 1min
    else:
        if options['timeframe'] in TIMEFRAME_FROM_STR_MAP:
            timeframe = TIMEFRAME_FROM_STR_MAP[options['timeframe']]
        else:
            try:
                timeframe = int(options['timeframe'])
            except ValueError:
                # kept invalid (-1) and rejected below
                pass

    if not options.get('cascaded'):
        cascaded = None
    else:
        if options['cascaded'] in TIMEFRAME_FROM_STR_MAP:
            cascaded = TIMEFRAME_FROM_STR_MAP[options['cascaded']]
        else:
            try:
                cascaded = int(options['cascaded'])
            except ValueError:
                pass

    if timeframe < 0:
        logger.error("Invalid timeframe")
        sys.exit(-1)

    # NOTE(review): from_date may be None when the 'from' option is missing,
    # from_date.timestamp() below would then fail — confirm the option is mandatory
    from_date = options.get('from')
    to_date = options.get('to')

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            # fix: timedelta() has no 'months' parameter (raised TypeError) ;
            # one month approximated as 30 days
            to_date = today + timedelta(days=30)
        else:
            to_date = today + timedelta(seconds=timeframe)

        to_date = to_date.replace(microsecond=0)

    # fix: the parsed numeric timeframe was overwritten here by the raw option
    # string (timeframe = options['timeframe']), breaking every numeric test below

    if timeframe > 0 and timeframe not in GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe,))
        return

    for market in options['market'].split(','):
        if market.startswith('!') or market.startswith('*'):
            continue

        generators = []
        from_tf = timeframe

        last_ticks = []
        last_ohlcs = {}

        if timeframe == Instrument.TF_TICK:
            tick_streamer = Database.inst().create_tick_streamer(
                options['broker'], market, from_date=from_date, to_date=to_date)
        else:
            ohlc_streamer = Database.inst().create_ohlc_streamer(
                options['broker'], market, timeframe, from_date=from_date, to_date=to_date)

        # cascaded generation of candles
        if cascaded:
            for tf in GENERATED_TF:
                if tf > timeframe:
                    # from timeframe greater than initial
                    if tf <= cascaded:
                        # until max cascaded timeframe
                        generators.append(CandleGenerator(from_tf, tf))
                        from_tf = tf

                        # store for generation
                        last_ohlcs[tf] = []
                else:
                    from_tf = tf

        if timeframe > 0:
            last_ohlcs[timeframe] = []

        n = 0  # counter reset at each progress report
        t = 0  # total processed rows

        timestamp = from_date.timestamp() + Instrument.TF_1M

        if timeframe == 0:
            while not tick_streamer.finished():
                ticks = tick_streamer.next(timestamp)
                timestamp += Instrument.TF_1M  # by step of 1M

                for data in ticks:
                    if generators:
                        last_ticks.append((float(data[0]) * 0.001, float(data[1]),
                                           float(data[2]), float(data[3])))

                    # generate higher candles
                    for generator in generators:
                        if generator.from_tf == 0:
                            candles = generator.generate_from_ticks(last_ticks)

                            if candles:
                                for c in candles:
                                    store_ohlc(options['broker'], market, generator.to_tf, c)

                                last_ohlcs[generator.to_tf] += candles

                            # remove consumed ticks
                            last_ticks = []
                        else:
                            candles = generator.generate_from_candles(
                                last_ohlcs[generator.from_tf])

                            if candles:
                                for c in candles:
                                    store_ohlc(options['broker'], market, generator.to_tf, c)

                                last_ohlcs[generator.to_tf] += candles

                            # remove consumed candles
                            last_ohlcs[generator.from_tf] = []

                    n += 1
                    t += 1

                    if n == 1000:
                        n = 0
                        Terminal.inst().info("%i..." % t)
                        Terminal.inst().flush()

                # calm down the storage of tick, if parsing is faster
                # NOTE(review): the threshold compares a pending count against
                # TICK_STORAGE_DELAY (a delay in seconds) — a MAX_PENDING_TICK
                # like constant is probably intended, confirm before changing
                while Database.inst().num_pending_ticks_storage() > TICK_STORAGE_DELAY:
                    time.sleep(TICK_STORAGE_DELAY)  # wait a little before continue

            logger.info("Read %i trades" % t)

        elif timeframe > 0:
            while not ohlc_streamer.finished():
                ohlcs = ohlc_streamer.next(timestamp)
                timestamp += Instrument.TF_1M  # by step of 1M

                for data in ohlcs:
                    if generators:
                        candle = Candle(float(data[0]) * 0.001, timeframe)

                        candle.set_bid_ohlc(float(data[1]), float(data[2]),
                                            float(data[3]), float(data[4]))
                        candle.set_ofr_ohlc(float(data[5]), float(data[6]),
                                            float(data[7]), float(data[8]))

                        candle.set_volume(float(data[9]))
                        candle.set_consolidated(True)

                        last_ohlcs[timeframe].append(candle)

                    # generate higher candles
                    for generator in generators:
                        candles = generator.generate_from_candles(
                            last_ohlcs[generator.from_tf])

                        if candles:
                            for c in candles:
                                store_ohlc(options['broker'], market, generator.to_tf, c)

                            last_ohlcs[generator.to_tf].extend(candles)

                        # remove consumed candles
                        last_ohlcs[generator.from_tf] = []

                    n += 1
                    t += 1

                    if n == 1000:
                        n = 0
                        Terminal.inst().info("%i..." % t)

            logger.info("Read %i candles" % t)

    Terminal.inst().info("Flushing database...")
    Terminal.inst().flush()

    Database.terminate()

    Terminal.inst().info("Rebuild done!")
    Terminal.inst().flush()

    Terminal.terminate()
    sys.exit(0)
def fetch_and_generate(self, market_id, timeframe, n_last=1, cascaded=None):
    """
    For initial fetching of the current OHLC.

    @param market_id Market to fetch.
    @param timeframe Base timeframe in seconds, must be in self.GENERATED_TF.
    @param n_last Number of last OHLCs to fetch.
    @param cascaded Max timeframe to cascade-generate from the fetched OHLCs.
    """
    if timeframe > 0 and timeframe not in self.GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe, ))
        return

    generators = []
    from_tf = timeframe

    # idiom fix: 'market_id not in' instead of 'not market_id in'
    if market_id not in self._last_ohlc:
        self._last_ohlc[market_id] = {}

    # compute a from date
    today = datetime.now().astimezone(UTC())
    from_date = today - timedelta(seconds=timeframe * n_last)
    to_date = today

    last_ohlcs = {}

    # cascaded generation of candles
    if cascaded:
        for tf in Watcher.GENERATED_TF:
            if tf > timeframe:
                # from timeframe greater than initial
                if tf <= cascaded:
                    # until max cascaded timeframe
                    generators.append(CandleGenerator(from_tf, tf))
                    from_tf = tf

                    # store for generation
                    last_ohlcs[tf] = []
            else:
                from_tf = tf

    if timeframe > 0:
        last_ohlcs[timeframe] = []

    n = 0

    for data in self.fetch_candles(market_id, timeframe, from_date, to_date, None):
        # store (int timestamp in ms, str bid, str ofr, str volume)
        # NOTE(review): OHLC rows are persisted through store_market_trade —
        # confirm store_market_ohlc is not intended here
        if not self._read_only:
            Database.inst().store_market_trade(
                (self.name, market_id, data[0], data[1], data[2], data[3]))

        candle = Candle(float(data[0]) * 0.001, timeframe)

        candle.set_bid_ohlc(float(data[1]), float(data[2]), float(data[3]), float(data[4]))
        candle.set_ofr_ohlc(float(data[5]), float(data[6]), float(data[7]), float(data[8]))

        candle.set_volume(float(data[9]))

        # the bar at the current base time is still open
        if candle.timestamp >= Instrument.basetime(timeframe, time.time()):
            candle.set_consolidated(False)  # current

        last_ohlcs[timeframe].append(candle)

        # only the last
        self._last_ohlc[market_id][timeframe] = candle

        # generate higher candles
        for generator in generators:
            candles = generator.generate_from_candles(
                last_ohlcs[generator.from_tf], False)

            if candles:
                if not self._read_only:
                    for c in candles:
                        self.store_candle(market_id, generator.to_tf, c)

                last_ohlcs[generator.to_tf].extend(candles)

                # only the last as current
                self._last_ohlc[market_id][generator.to_tf] = candles[-1]
            elif generator.current:
                self._last_ohlc[market_id][generator.to_tf] = generator.current

            # remove consumed candles
            last_ohlcs[generator.from_tf] = []

        n += 1

    # the last OHLC of each timeframe is the in-progress one
    for k, ohlc in self._last_ohlc[market_id].items():
        if ohlc:
            ohlc.set_consolidated(False)
def do_rebuilder(options):
    """Rebuild OHLC bars from stored ticks/trades or from a base OHLC timeframe.

    For each market in options['market'], streams the stored data between the
    from/to dates and regenerates (optionally cascaded) higher timeframes,
    storing them back into the database. Terminates the process when done.

    :param options: dict of parsed command-line/config options ('identity',
        'broker', 'market', 'timeframe', 'cascaded', 'from', 'to'...).
    """
    Terminal.inst().info("Starting SIIS rebuilder using %s identity..." % options['identity'])
    Terminal.inst().flush()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    timeframe = -1
    cascaded = None

    if not options.get('timeframe'):
        timeframe = 60  # default to 1min
    else:
        if options['timeframe'] in TIMEFRAME_FROM_STR_MAP:
            timeframe = TIMEFRAME_FROM_STR_MAP[options['timeframe']]
        else:
            try:
                timeframe = int(options['timeframe'])
            except ValueError:
                pass

    if not options.get('cascaded'):
        cascaded = None
    else:
        if options['cascaded'] in TIMEFRAME_FROM_STR_MAP:
            cascaded = TIMEFRAME_FROM_STR_MAP[options['cascaded']]
        else:
            try:
                cascaded = int(options['cascaded'])
            except ValueError:
                pass

    if timeframe < 0:
        logger.error("Invalid timeframe")
        sys.exit(-1)

    from_date = options.get('from')
    to_date = options.get('to')

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            # fix: timedelta() has no 'months' argument (raised TypeError);
            # step a little more than one month ahead instead
            to_date = today + timedelta(days=32)
        else:
            to_date = today + timedelta(seconds=timeframe)

        to_date = to_date.replace(microsecond=0)

    if timeframe > 0 and timeframe not in GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe,))
        return

    for market in options['market'].split(','):
        # '!' and '*' entries are market filters, not concrete markets
        if market.startswith('!') or market.startswith('*'):
            continue

        timestamp = from_date.timestamp()
        to_timestamp = to_date.timestamp()
        progression = 0.0
        prev_update = timestamp
        count = 0
        total_count = 0

        progression_incr = (to_timestamp - timestamp) * 0.01

        tts = 0.0
        prev_tts = 0.0

        generators = []
        from_tf = timeframe

        last_ticks = []
        last_ohlcs = {}

        if timeframe == Instrument.TF_TICK:
            tick_streamer = Database.inst().create_tick_streamer(options['broker'], market, from_date=from_date, to_date=to_date)
        else:
            ohlc_streamer = Database.inst().create_ohlc_streamer(options['broker'], market, timeframe, from_date=from_date, to_date=to_date)

        # cascaded generation of candles
        if cascaded:
            for tf in GENERATED_TF:
                if tf > timeframe:
                    # from timeframe greater than initial
                    if tf <= cascaded:
                        # until max cascaded timeframe
                        generators.append(CandleGenerator(from_tf, tf))
                        from_tf = tf

                        # store for generation
                        last_ohlcs[tf] = []
                else:
                    from_tf = tf

        if timeframe > 0:
            last_ohlcs[timeframe] = []

        if timeframe == 0:
            # tick source: walk by steps of one minute
            while not tick_streamer.finished():
                ticks = tick_streamer.next(timestamp + Instrument.TF_1M)

                count = len(ticks)
                total_count += len(ticks)

                for data in ticks:
                    if data[0] > to_timestamp:
                        break

                    if generators:
                        last_ticks.append(data)

                # generate higher candles
                for generator in generators:
                    if generator.from_tf == 0:
                        candles = generator.generate_from_ticks(last_ticks)

                        if candles:
                            for c in candles:
                                store_ohlc(options['broker'], market, generator.to_tf, c)

                            last_ohlcs[generator.to_tf] += candles

                        # remove consumed ticks
                        last_ticks = []
                    else:
                        candles = generator.generate_from_candles(last_ohlcs[generator.from_tf])

                        if candles:
                            for c in candles:
                                store_ohlc(options['broker'], market, generator.to_tf, c)

                            last_ohlcs[generator.to_tf] += candles

                        # remove consumed candles
                        last_ohlcs[generator.from_tf] = []

                if timestamp - prev_update >= progression_incr:
                    progression += 1

                    Terminal.inst().info("%i%% on %s, %s ticks/trades for 1 minute, current total of %s..." % (progression, format_datetime(timestamp), count, total_count))

                    prev_update = timestamp
                    count = 0

                if timestamp > to_timestamp:
                    break

                timestamp += Instrument.TF_1M  # by step of 1m

                # calm down the storage of tick, if parsing is faster
                while Database.inst().num_pending_ticks_storage() > TICK_STORAGE_DELAY:
                    time.sleep(TICK_STORAGE_DELAY)  # wait a little before continue

        elif timeframe > 0:
            while not ohlc_streamer.finished():
                ohlcs = ohlc_streamer.next(timestamp + timeframe * 100)  # per 100

                count = len(ohlcs)
                total_count += len(ohlcs)

                for data in ohlcs:
                    if data.timestamp > to_timestamp:
                        break

                    if generators:
                        # fix: original appended the undefined name 'candle'
                        last_ohlcs[timeframe].append(data)

                    # generate higher candles
                    for generator in generators:
                        candles = generator.generate_from_candles(last_ohlcs[generator.from_tf])

                        if candles:
                            for c in candles:
                                store_ohlc(options['broker'], market, generator.to_tf, c)

                            last_ohlcs[generator.to_tf].extend(candles)

                        # remove consumed candles
                        last_ohlcs[generator.from_tf] = []

                    prev_tts = tts
                    # fix: 'tts' was never assigned, leaving timestamp stuck at 0.0;
                    # advance using the last processed OHLC timestamp
                    tts = data.timestamp
                    timestamp = tts

                if timestamp - prev_update >= progression_incr:
                    progression += 1

                    Terminal.inst().info("%i%% on %s, %s ticks/trades for 1 minute, current total of %s..." % (progression, format_datetime(timestamp), count, total_count))

                    prev_update = timestamp
                    count = 0

                if timestamp > to_timestamp:
                    break

                if total_count == 0:
                    # empty range: skip ahead to avoid spinning on the same window
                    timestamp += timeframe * 100

        if progression < 100:
            Terminal.inst().info("100%% on %s, %s ticks/trades for 1 minute, current total of %s..." % (format_datetime(timestamp), count, total_count))

    Terminal.inst().info("Flushing database...")
    Terminal.inst().flush()

    Database.terminate()

    Terminal.inst().info("Rebuild done!")
    Terminal.inst().flush()

    Terminal.terminate()
    sys.exit(0)
def __update_positions(self, symbol, market):
    """Synchronize the local Position objects with the exchange positions.

    Iterates every configured market, queries the websocket position state and
    creates/updates/deletes the corresponding local Position.

    NOTE(review): the symbol/market parameters are immediately shadowed by the
    loop over self._markets; kept unchanged for interface compatibility.
    """
    if not self.connected:
        return

    # position for each configured market
    for symbol, market in self._markets.items():
        pos = self._watcher.connector.ws.position(symbol)
        position = None

        if self._positions.get(symbol):
            position = self._positions.get(symbol)

        elif pos['isOpen']:
            # insert the new position
            position = Position(self)
            position.set_position_id(symbol)
            position.set_key(self.service.gen_key())

            # absolute quantity + separate direction flag
            quantity = abs(float(pos['currentQty']))
            direction = Position.SHORT if pos['currentQty'] < 0 else Position.LONG

            position.entry(direction, symbol, quantity)

            position.leverage = pos['leverage']
            position.entry_price = pos['avgEntryPrice']
            # fix: datetime.timestamp() returns a float; the original chained a
            # second .timestamp() call which raised AttributeError
            position.created_time = datetime.strptime(pos['openingTimestamp'], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=UTC()).timestamp()

            # id is symbol
            self._positions[symbol] = position

        elif (not pos['isOpen'] or pos['currentQty'] == 0) and self._positions.get(symbol):
            # no more position
            del self._positions[symbol]

        if position:
            # absolute value because we work with positive quantity + direction information
            position.quantity = abs(float(pos['currentQty']))
            position.direction = Position.SHORT if pos['currentQty'] < 0 else Position.LONG

            position.leverage = pos['leverage']

            # position.market_close = pos['market_close']
            position.entry_price = pos['avgEntryPrice']
            # fix: same double .timestamp() call removed here
            position.created_time = datetime.strptime(pos['openingTimestamp'], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=UTC()).timestamp()
def __update_orders(self):
    """Synchronize the local Order objects with the exchange open orders.

    Removes local orders that no longer exist remotely, then inserts or updates
    an Order for each open order reported by the websocket.
    """
    if not self.connected:
        return

    # filters only siis managed orders
    src_orders = self._watcher.connector.ws.open_orders("")  # "siis_")

    # first delete older orders
    order_rm_list = []

    for k, order in self._orders.items():
        found = False

        for src_order in src_orders:
            if order.order_id == src_order['clOrdID'] or order.order_id == src_order['orderID']:
                found = True
                break

        if not found:
            order_rm_list.append(order.order_id)

    for order_id in order_rm_list:
        del self._orders[order_id]

    # insert or update active orders
    for src_order in src_orders:
        # prefer the client order id when present
        src_order_id = src_order['clOrdID'] or src_order['orderID']

        order = self._orders.get(src_order_id)

        if order is None:
            # insert
            order = Order(self, src_order['symbol'])
            order.set_order_id(src_order_id)

            self._orders[order.order_id] = order

        # logger.info(src_order)

        # probably modifier or when leavesQty is update the ordStatus must change
        # if src_order['ordStatus'] != "New":
        #     continue

        # update
        order.direction = Position.LONG if src_order['side'] == 'Buy' else Position.SHORT

        # 'orderQty' (ordered qty), 'cumQty' (cumulative done), 'leavesQty' (remaning)
        order.quantity = src_order.get('leavesQty', src_order.get('orderQty', 0))

        if src_order.get('transactTime'):
            order.transact_time = self._parse_datetime(src_order.get('transactTime')).replace(tzinfo=UTC()).timestamp()

        if src_order['ordType'] == "Market":
            order.order_type = Order.ORDER_MARKET
        elif src_order['ordType'] == "Limit":
            order.order_type = Order.ORDER_LIMIT
            order.price = src_order.get('price')
        elif src_order['ordType'] == "Stop":
            order.order_type = Order.ORDER_STOP
            order.stop_price = src_order.get('stopPx')
        elif src_order['ordType'] == "StopLimit":
            order.order_type = Order.ORDER_STOP_LIMIT
            order.price = src_order.get('price')
            order.stop_price = src_order.get('stopPx')
        elif src_order['ordType'] == "MarketIfTouched":
            order.order_type = Order.ORDER_TAKE_PROFIT
            order.stop_price = src_order.get('stopPx')
        elif src_order['ordType'] == "LimitIfTouched":
            order.order_type = Order.ORDER_TAKE_PROFIT_LIMIT
            order.price = src_order.get('price')
            order.stop_price = src_order.get('stopPx')

        if src_order['timeInForce'] == 'GoodTillCancel':
            order.time_in_force = Order.TIME_IN_FORCE_GTC
        elif src_order['timeInForce'] == 'ImmediateOrCancel':
            order.time_in_force = Order.TIME_IN_FORCE_IOC
        elif src_order['timeInForce'] == 'FillOrKill':
            order.time_in_force = Order.TIME_IN_FORCE_FOK
        else:
            order.time_in_force = Order.TIME_IN_FORCE_GTC

        # triggered, ordRejReason, currency
        # @todo

        # execution options
        exec_inst = src_order['execInst'].split(',')

        # taker or maker fee
        order.post_only = 'ParticipateDoNotInitiate' in exec_inst

        # close only order (must be used with reduce only, only reduce a position, and close opposites orders)
        order.close_only = 'Close' in exec_inst

        # reduce only order (only reduce a position)
        # fix: the False branch assigned a misspelled attribute 'redeuce_only',
        # so reduce_only was never cleared once set
        order.reduce_only = 'ReduceOnly' in exec_inst

        # execution (trigger) price reference
        if 'LastPrice' in exec_inst:
            order.price_type = Order.PRICE_LAST
        elif 'IndexPrice' in exec_inst:
            # fix: IndexPrice/MarkPrice were mapped to swapped constants
            order.price_type = Order.PRICE_INDEX
        elif 'MarkPrice' in exec_inst:
            order.price_type = Order.PRICE_MARK
def do_optimizer(options):
    """Verify stored market data integrity (gaps, inconsistencies).

    Depending on the timeframe option, checks every generated OHLC timeframe,
    the ticks/trades, or a single OHLC timeframe, for each specified market.
    Terminates the process when done.

    :param options: dict of parsed command-line/config options ('broker',
        'market', 'timeframe', 'from', 'to'...).
    """
    Terminal.inst().info("Starting SIIS optimizer...")
    Terminal.inst().flush()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    broker_id = options['broker']
    market_id = options['market']

    timeframe = None

    # fix: the timeframe must be parsed BEFORE computing the default to-date;
    # the original used timeframe while still None (timedelta(seconds=None) -> TypeError)
    if options.get('timeframe'):
        if options['timeframe'] in TIMEFRAME_FROM_STR_MAP:
            timeframe = TIMEFRAME_FROM_STR_MAP[options['timeframe']]
        else:
            try:
                timeframe = int(options['timeframe'])
            except ValueError:
                pass

    from_date = options.get('from')
    to_date = options.get('to')

    if not to_date:
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            # fix: timedelta() has no 'months' argument; step a bit more than one month
            to_date = today + timedelta(days=32)
        else:
            # when no timeframe is given (check all), default to a 1 minute margin
            to_date = today + timedelta(seconds=timeframe or 60)

        to_date = to_date.replace(microsecond=0)

    try:
        # checking data integrity, gap...
        if timeframe is None:
            # no timeframe given: verify every generated OHLC timeframe
            for market in options['market'].split(','):
                if market.startswith('!') or market.startswith('*'):
                    continue

                for tf in GENERATED_TF:
                    Terminal.inst().info("Verifying %s OHLC %s..." % (market, timeframe_to_str(tf)))

                    check_ohlcs(options['broker'], market, tf, from_date, to_date)

        elif timeframe == Instrument.TF_TICK:
            for market in options['market'].split(','):
                if market.startswith('!') or market.startswith('*'):
                    continue

                Terminal.inst().info("Verifying %s ticks/trades..." % (market,))

                check_ticks(options['broker'], market, from_date, to_date)

        elif timeframe > 0:
            # particular ohlc
            for market in options['market'].split(','):
                if market.startswith('!') or market.startswith('*'):
                    continue

                Terminal.inst().info("Verifying %s OHLC %s..." % (market, timeframe_to_str(timeframe)))

                check_ohlcs(options['broker'], market, timeframe, from_date, to_date)

    except KeyboardInterrupt:
        pass
    finally:
        pass

    Terminal.inst().info("Flushing database...")
    Terminal.inst().flush()

    Database.terminate()

    Terminal.inst().info("Optimization done!")
    Terminal.inst().flush()

    Terminal.terminate()
    sys.exit(0)
def on_trade_update(self, item_update):
    """Handle an IG streaming TRADE item update for this account.

    Dispatches the three payload kinds carried by a TRADE subscription:
    - WOU: working order updates (open/updated/deleted)
    - CONFIRMS: deal confirmations (accepted/rejected, fills)
    - OPU: open position updates (open/updated/deleted)
    and emits the corresponding Signal notifications.

    Any exception is caught and logged so the streaming callback never raises.
    """
    name = item_update.get('name', '').split(':')

    try:
        # only process TRADE items belonging to our account
        if len(name) == 2 and name[0] == 'TRADE' and name[1] == self._account_id:
            # live trade updates
            values = item_update['values']

            #
            # active waiting order (open/updated/deleted)
            #

            if values.get('WOU'):
                data = json.loads(values.get('WOU'))
                exec_logger.info("ig.com WOU %s" % str(data))

                order_id = data['dealId']
                ref_order_id = data['dealReference']
                epic = data['epic']

                # date of the event 2018-09-13T20:36:01.096 without Z
                event_time = datetime.strptime(data['timestamp'], '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=UTC()).timestamp()

                if data.get('direction', '') == 'BUY':
                    direction = Order.LONG
                elif data.get('direction', '') == 'SELL':
                    direction = Order.SHORT
                else:
                    direction = 0

                if data.get('dealStatus', "") == 'REJECTED':
                    pass
                elif data.get('dealStatus', "") == 'ACCEPTED':
                    # fields are optional in the payload: coerce to float or keep None
                    quantity = float(data.get('size')) if data.get('size') is not None else 0.0
                    level = float(data['level']) if data.get('level') is not None else None
                    stop_distance = float(data['stopDistance']) if data.get('stopDistance') is not None else None
                    limit_distance = float(data['limitDistance']) if data.get('limitDistance') is not None else None
                    guaranteed_stop = data.get('guaranteedStop', False)
                    currency = data.get('currency', "")

                    if data.get('orderType'):
                        if data['orderType'] == "LIMIT":
                            order_type = Order.ORDER_LIMIT
                        elif data['orderType'] == "STOP":
                            order_type = Order.ORDER_STOP
                        else:
                            order_type = Order.ORDER_MARKET
                    else:
                        order_type = Order.ORDER_MARKET

                    # NOTE(review): an unrecognized timeInForce value leaves
                    # time_in_force unassigned — confirm the possible values
                    if data.get('timeInForce'):
                        if data['timeInForce'] == "GOOD_TILL_CANCELLED":
                            time_in_force = Order.TIME_IN_FORCE_GTC
                        elif data['timeInForce'] == "GOOD_TILL_DATE":
                            time_in_force = Order.TIME_IN_FORCE_GTD
                            # data['goodTillDate'] @todo till date
                    else:
                        time_in_force = Order.TIME_IN_FORCE_GTC

                    status = data.get('status', "")

                    if status == "OPEN":
                        order_data = {
                            'id': order_id,
                            'type': order_type,
                            'time-in-force': time_in_force,
                            'price': level if order_type == Order.ORDER_LIMIT else None,
                            'stop-price': level if order_type == Order.ORDER_STOP else None,
                            'stop-loss': stop_distance,
                            'take-profit': limit_distance
                        }

                        self.service.notify(Signal.SIGNAL_ORDER_OPENED, self.name, (epic, order_data, ref_order_id))

                    elif status == "UPDATED":
                        # signal of updated order
                        order_data = {
                            'id': order_id,
                            'type': order_type,
                            'time-in-force': time_in_force,
                            'price': level if order_type == Order.ORDER_LIMIT else None,
                            'stop-price': level if order_type == Order.ORDER_STOP else None,
                            'stop-loss': stop_distance,
                            'take-profit': limit_distance
                        }

                        self.service.notify(Signal.SIGNAL_ORDER_UPDATED, self.name, (epic, order_data, ref_order_id))

                    elif status == "DELETED":
                        # signal of deleted order
                        self.service.notify(Signal.SIGNAL_ORDER_DELETED, self.name, (epic, order_id, ref_order_id))

            #
            # order confirms (accepted/rejected)
            #

            if values.get('CONFIRMS'):
                data = json.loads(values.get('CONFIRMS'))
                exec_logger.info("ig.com CONFIRMS %s" % str(data))

                epic = data.get('epic')

                if data.get('dealStatus', "") == "REJECTED":
                    ref_order_id = data['dealReference']

                    self.service.notify(Signal.SIGNAL_ORDER_REJECTED, self.name, (epic, ref_order_id))

                elif data.get('dealStatus', "") == "ACCEPTED":
                    # deal confirmed and accepted
                    order_id = data['dealId']
                    ref_order_id = data['dealReference']

                    # date 2018-09-13T20:36:01.096 without Z
                    event_time = datetime.strptime(data['date'], '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=UTC()).timestamp()

                    # direction of the trade
                    if data['direction'] == 'BUY':
                        direction = Order.LONG
                    elif data['direction'] == 'SELL':
                        direction = Order.SHORT
                    else:
                        direction = 0

                    level = float(data['level']) if data.get('level') is not None else None  # exec price
                    quantity = float(data['size']) if data.get('size') is not None else 0.0
                    stop_level = float(data['stopLevel']) if data.get('stopLevel') is not None else 0.0
                    limit_level = float(data['limitLevel']) if data.get('limitLevel') is not None else 0.0
                    profit_loss = float(data['profit']) if data.get('profit') is not None else 0.0
                    profit_currency = data.get('profitCurrency', "")
                    # 'expiry', 'guaranteedStop'

                    # affected positions, normaly should not be necessary except if user create a manual trade that could reduce an existing position
                    # for affected_deal in data.get('affectedDeals', []):
                    #     position_id = affected_deal['dealId']
                    #     status = affected_deal.get('status', "")
                    #     if status == "AMENDED":
                    #         pass
                    #     elif status == "DELETED":
                    #         pass
                    #     elif status == "FULLY_CLOSED":
                    #         pass
                    #     elif status == "OPENED":
                    #         pass
                    #     elif status == "PARTIALLY_CLOSED":
                    #         pass

                    status = data.get('status', "")

                    if status == "AMENDED":
                        # traded and initial
                        order = {
                            'id': order_id,
                            'symbol': epic,
                            'timestamp': event_time,
                            'direction': direction,
                            'quantity': None,  # no have
                            'filled': None,  # no have
                            'cumulative-filled': quantity,
                            'exec-price': level,
                            'avg-price': None,
                            'stop-loss': stop_level,
                            'take-profit': limit_level,
                            'profit-loss': profit_loss,
                            'profit-currency': profit_currency,
                            'info': 'amended'
                        }

                        self.service.notify(Signal.SIGNAL_ORDER_TRADED, self.name, (epic, order, ref_order_id))

                    elif status == "CLOSED":
                        # traded and completed
                        order = {
                            'id': order_id,
                            'symbol': epic,
                            'timestamp': event_time,
                            'direction': direction,
                            'quantity': None,
                            'filled': None,
                            'cumulative-filled': quantity,
                            'exec-price': level,
                            'avg-price': None,
                            'profit-loss': profit_loss,
                            'profit-currency': profit_currency,
                            'info': 'closed'
                        }

                        # infer the original order type from the attached levels
                        if data.get('limitLevel') and data.get('stopLevel'):
                            order['type'] = Order.ORDER_STOP_LIMIT
                        elif data.get('limitLevel'):
                            order['type'] = Order.ORDER_LIMIT
                        elif data.get('stopLevel'):
                            order['type'] = Order.ORDER_STOP
                        else:
                            order['type'] = Order.ORDER_MARKET

                        self.service.notify(Signal.SIGNAL_ORDER_TRADED, self.name, (epic, order, ref_order_id))
                        self.service.notify(Signal.SIGNAL_ORDER_DELETED, self.name, (epic, order_id, ""))

                    elif status == "DELETED":
                        # deleted why for, we never receive them
                        self.service.notify(Signal.SIGNAL_ORDER_DELETED, self.name, (epic, order_id, ""))

                    elif status == "OPEN":
                        # traded and initial
                        order = {
                            'id': order_id,
                            'symbol': epic,
                            'timestamp': event_time,
                            'direction': direction,
                            'quantity': None,  # no have
                            'filled': None,  # no have
                            'cumulative-filled': quantity,
                            'exec-price': level,
                            'avg-price': None,
                            'stop-loss': stop_level,
                            'take-profit': limit_level,
                            'profit-loss': profit_loss,
                            'profit-currency': profit_currency,
                            'info': 'open'
                        }

                        # infer the order type and its price fields from the attached levels
                        if data.get('limitLevel') and data.get('stopLevel'):
                            order['type'] = Order.ORDER_STOP_LIMIT
                            order['price'] = float(data.get('limitLevel'))
                            order['stop-price'] = float(data.get('stopLevel'))
                        elif data.get('limitLevel'):
                            order['type'] = Order.ORDER_LIMIT
                            order['price'] = float(data.get('limitLevel'))
                        elif data.get('stopLevel'):
                            order['type'] = Order.ORDER_STOP
                            order['stop-price'] = float(data.get('stopLevel'))
                        else:
                            order['type'] = Order.ORDER_MARKET

                        # @todo 'limitDistance' 'stopDistance' 'trailingStop'

                        self.service.notify(Signal.SIGNAL_ORDER_OPENED, self.name, (epic, order, ref_order_id))

                        # already partially or fully filled at open
                        if quantity > 0.0:
                            self.service.notify(Signal.SIGNAL_ORDER_TRADED, self.name, (epic, order, ref_order_id))

                    elif status == "PARTIALLY_CLOSED":
                        # traded and partially completed
                        order = {
                            'id': order_id,
                            'symbol': epic,
                            'timestamp': event_time,
                            'direction': direction,
                            'quantity': None,  # no have
                            'filled': None,  # no have
                            'cumulative-filled': quantity,
                            'exec-price': level,
                            'avg-price': None,
                            'profit-loss': profit_loss,
                            'profit-currency': profit_currency,
                            'info': 'partially-closed'
                        }

                        self.service.notify(Signal.SIGNAL_ORDER_TRADED, self.name, (epic, order, ref_order_id))

            #
            # active position (open/updated/deleted)
            #

            if values.get('OPU'):
                data = json.loads(values.get('OPU'))
                exec_logger.info("ig.com OPU %s" % str(data))

                position_id = data['dealId']
                ref_order_id = data['dealReference']
                epic = data.get('epic')
                # "channel": "WTP", "expiry": "-"

                # date of the event 2018-09-13T20:36:01.096 without Z
                event_time = datetime.strptime(data['timestamp'], '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=UTC()).timestamp()

                # NOTE(review): the fallback direction here is LONG (unlike 0 above) — confirm intent
                if data.get('direction', '') == 'BUY':
                    direction = Order.LONG
                elif data.get('direction', '') == 'SELL':
                    direction = Order.SHORT
                else:
                    direction = Order.LONG

                if data.get('dealStatus', "") == "REJECTED":
                    pass
                elif data.get('dealStatus', "") == "ACCEPTED":
                    quantity = float(data.get('size')) if data.get('size') is not None else 0.0
                    level = float(data['level']) if data.get('level') is not None else None
                    stop_level = float(data['stopLevel']) if data.get('stopLevel') is not None else None
                    limit_level = float(data['limitLevel']) if data.get('limitLevel') is not None else None
                    profit_loss = float(data['profit']) if data.get('profit') is not None else 0.0
                    profit_currency = data.get('profitCurrency', "")

                    # @todo trailingStep, trailingStopDistance, guaranteedStop

                    status = data.get('status', "")

                    if status == "OPEN":
                        # signal of opened position
                        position_data = {
                            'id': position_id,
                            'symbol': epic,
                            'direction': direction,
                            'timestamp': event_time,
                            'quantity': quantity,
                            'exec-price': level,
                            'avg-price': level,
                            'avg-entry-price': level,  # entry
                            'stop-loss': stop_level,
                            'take-profit': limit_level,
                            'profit-loss': profit_loss,
                            'profit-currency': profit_currency,
                            'cumulative-filled': None,
                            'filled': None,
                            'liquidation-price': None
                        }

                        self.service.notify(Signal.SIGNAL_POSITION_OPENED, self.name, (epic, position_data, ref_order_id))

                    elif status == "UPDATED":
                        # signal of updated position
                        position_data = {
                            'id': position_id,
                            'symbol': epic,
                            'direction': direction,
                            'timestamp': event_time,
                            'quantity': quantity,
                            'exec-price': level,
                            'avg-entry-price': level,  # entry
                            'avg-price': level,
                            'stop-loss': stop_level,
                            'take-profit': limit_level,
                            'profit-loss': profit_loss,
                            'profit-currency': profit_currency,
                            'cumulative-filled': None,
                            'filled': None,
                            'liquidation-price': None
                        }

                        self.service.notify(Signal.SIGNAL_POSITION_UPDATED, self.name, (epic, position_data, ref_order_id))

                    elif status == "DELETED":
                        # signal of deleted position
                        position_data = {
                            'id': position_id,
                            'symbol': epic,
                            'direction': direction,
                            'timestamp': event_time,
                            'quantity': quantity,
                            'exec-price': level,
                            'avg-price': level,
                            'avg-exit-price': level,  # exit
                            'stop-loss': stop_level,
                            'take-profit': limit_level,
                            'profit-loss': profit_loss,
                            'profit-currency': profit_currency,
                            'cumulative-filled': None,
                            'filled': None,
                            'liquidation-price': None
                        }

                        self.service.notify(Signal.SIGNAL_POSITION_DELETED, self.name, (epic, position_data, ref_order_id))

    except Exception as e:
        # streaming callback must never propagate: log and continue
        error_logger.error(repr(e))
        traceback_logger.error(traceback.format_exc())
def application(argv):
    """Entry point of the SIIS connector runner.

    Parses command-line options, configures logging/services and runs the
    selected mode (binarizer, fetcher or live connector handler).

    :param argv: sys.argv-like list of command line arguments.
    """
    fix_thread_set_name()

    # default options
    options = {
        'identity': 'real',
        'config-path': './user/config',
        'connectors-path': './user/config/connectors',
        'log-path': './user/log',
        'reports-path': './user/reports',
        'markets-path': './user/markets',
        'log-name': 'connector.log',
        'default': "connector.json",
        'connector-config': "",
        'database': {
            'name': "siis",
            'type': "pgsql",
            'host': "127.0.0.1",
            'port': 5432,
            'user': "******",
            'password': "******"
        },
        'cache': {
            'name': "siis",
            'type': "redis",
            'host': "127.0.0.1",
            'port': 6379,
            'user': "******",
            'password': "******"
        },
        'strategy': {
            'protocol': "tcp",
            'host': "127.0.0.1",
            'port': 5600
        },
        'connector': {
            'name': "",
            'host': ""
        },
        'markets': {},
        'connectors': {
            'binance.com': {
                'classpath': 'connectors.binance.connector.BinanceConnector',
            },
            'bitmex.com': {
                'classpath': 'connectors.bitmex.connector.BitMexConnector',
            },
        'ig.com': {
                'classpath': 'connectors.ig.connector.IGConnector',
            },
        }
    }

    # create initial siis data structure if necessary
    install(options)

    siis_log = SiisLog(options, "uterm")
    siis_logger = logging.getLogger('siis.connector')

    if len(argv) > 1:
        # utc or local datetime ?
        for n, arg in enumerate(argv):
            if arg.startswith('-'):
                if arg == '--fetch' or arg == '-F':
                    # use the fetcher
                    options['fetch'] = True
                elif (arg == '--spec' or arg == '-S') and n + 1 < len(argv):
                    # fetcher data history option
                    options['option'] = argv[n + 1]
                elif arg == '--binarize':
                    # use the binarizer
                    options['binarize'] = True
                elif arg == '--sync':
                    # use the syncer
                    options['sync'] = True
                elif (arg == '--connector' or arg == '-c') and n + 1 < len(argv):
                    # connector conf filename
                    options['connector-config'] = argv[n + 1]
                elif (arg == '--from' or arg == '-f') and n + 1 < len(argv):
                    # if backtest from date (if ommited use whoole data) date format is "yyyy-mm-dd-hh:mm:ss", fetch, binarize to date
                    # NOTE(review): assumes module-level 'import datetime' here; the
                    # rest of the project imports the datetime class directly — confirm
                    options['from'] = datetime.datetime.strptime(argv[n + 1], '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
                elif (arg == '--to' or arg == '-t') and n + 1 < len(argv):
                    # if backtest to date (can be ommited), fetch, binarize to date
                    options['to'] = datetime.datetime.strptime(argv[n + 1], '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
                elif (arg == '--last' or arg == '-l') and n + 1 < len(argv):
                    # fetch the last n data history
                    options['last'] = int(argv[n + 1])
                elif (arg == '--market' or arg == '-m') and n + 1 < len(argv):
                    # fetch, binarize the data history for this market
                    options['market'] = argv[n + 1]
                elif (arg == '--timeframe' or arg == '-s') and n + 1 < len(argv):
                    # fetch, binarize base timeframe
                    options['timeframe'] = argv[n + 1]
                elif (arg == '--cascaded' or arg == '-C') and n + 1 < len(argv):
                    # fetch cascaded ohlc generation
                    # fix: the long form was compared as '--cascaded=' (with a
                    # trailing '=') and could never match an argv element
                    options['cascaded'] = argv[n + 1]
                elif arg == '--read-only':
                    options['read-only'] = True
                elif arg == '--check-data':
                    options['check-data'] = True
                elif arg == '--version' or arg == '-v':
                    print('%s %s' % (APP_SHORT_NAME, '.'.join([str(x) for x in APP_VERSION])))
                    sys.exit(0)
                elif arg == '--help' or arg == '-h':
                    display_cmd_line_help()
                    sys.exit(0)

    # replay
    if options.get('replay', False):
        if options.get('from') is None or options.get('to') is None:
            del options['replay']
            print("Replay need from= and to= date time")
            sys.exit(-1)

    if not options['connector-config']:
        print("Connector configuration filename must be specified")
        sys.exit(-1)

    #
    # config
    #

    parse_config(siis_logger, options)
    parse_connector_spec(siis_logger, options)

    siis_log.upgrade(options)

    #
    # binarizer
    #

    if options.get('binarize'):
        if options.get('market') and options.get('from') and options.get('to') and options.get('connector'):
            do_binarizer(options, siis_logger)
        else:
            display_cmd_line_help()

        sys.exit(0)

    #
    # fetcher mode
    #

    if options.get('fetch'):
        if options.get('market') and options.get('connector') and options.get('timeframe'):
            do_fetcher(options, siis_logger)
        else:
            display_cmd_line_help()

        sys.exit(0)

    #
    # running mode
    #

    print("Starting SIIS simple runner using %s..." % options['connector-config'])
    print("Hit CTRL-C twice to terminate")

    if options.get('replay'):
        print("Process a replay.")

    # monitoring service
    # print("Starting monitor service...")
    # monitor_service = MonitorService(options)
    # monitor_service.start()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    print("Starting connector handler...")
    handler = DefaultHandler(options)
    handler.init(options)
    handler.start()

    run(siis_logger, handler)

    print("Terminate...")
    handler.stop()
    handler.terminate()
    handler = None

    print("Saving database...")
    Database.terminate()

    print("Bye!")