def _getnexteos(self):
    '''Returns the next eos using a trading calendar if available'''
    # Clones delegate to the original feed
    if self._clone:
        return self.data._getnexteos()

    # No bars yet -> nothing meaningful to report
    if not len(self):
        return datetime.datetime.min, 0.0

    dt = self.lines.datetime[0]
    dtime = num2date(dt)
    if self._calendar is None:
        # No calendar: end-of-session is the configured sessionend on
        # the current bar's day (rolled forward if already past it)
        nexteos = datetime.datetime.combine(dtime, self.p.sessionend)
        nextdteos = self.date2num(nexteos)  # locl'ed -> utc-like
        nexteos = num2date(nextdteos)  # utc
        while dtime > nexteos:
            nexteos += datetime.timedelta(days=1)  # already utc-like

        nextdteos = date2num(nexteos)  # -> utc-like
    else:
        # returns times in utc
        _, nexteos = self._calendar.schedule(dtime, self._tz)
        nextdteos = date2num(nexteos)  # nextos is already utc

    return nexteos, nextdteos
def _load_rtbar(self, rtbar, hist=False, hist_tzo=None):
    """Load one complete 5-second real-time (or historical) bar.

    Historical bars carry the datetime in ``date`` (shifted by the
    timezone offset), real-time bars carry it in ``time``.  Returns
    False when the bar would arrive out of order and ``latethrough``
    is not set.
    """
    if hist:
        # default offset: local timezone offset in hours
        tzoffset = hist_tzo if hist_tzo is not None else time.timezone / 3600
        rtbar.date = rtbar.date + datetime.timedelta(hours=tzoffset)
        dt = date2num(rtbar.date)
    else:
        dt = date2num(rtbar.time)

    if dt < self.lines.datetime[-1] and not self.p.latethrough:
        return False  # cannot deliver earlier than already delivered

    self.lines.datetime[0] = dt

    # some API versions expose ``open_`` instead of ``open``
    try:
        self.lines.open[0] = rtbar.open
    except AttributeError:
        self.lines.open[0] = rtbar.open_

    self.lines.high[0] = rtbar.high
    self.lines.low[0] = rtbar.low
    self.lines.close[0] = rtbar.close
    self.lines.volume[0] = rtbar.volume
    self.lines.openinterest[0] = 0
    return True
def _load(self):
    # Deliver the next bar from the SQL cursor.  When the cursor is
    # exhausted fetchone() returns None and ``None in bar`` raises
    # TypeError, which is (ab)used below as the end-of-data signal.
    self._idx += 1
    try:
        bar = self.cursor.fetchone()
        # skip rows containing NULL values; raises TypeError once the
        # cursor returns None (no more rows)
        while None in bar:
            bar = self.cursor.fetchone()
        self.lines.datetime[0] = date2num(bar[0])
        self.lines.open[0] = bar[1]
        self.lines.high[0] = bar[2]
        self.lines.low[0] = bar[3]
        self.lines.close[0] = bar[4]
        self.lines.volume[0] = bar[5]
        #self.lines.openinterest[0] = 0
        return True
    except TypeError:
        # countdown of remaining "live" retries before giving up
        self.live -= 1
        if self.p.sleep == 0:
            pass
        else:
            time.sleep(3)
        if self.live == 1:
            # NOTE(review): emits a hard-coded dummy bar (plus a debug
            # print) on the last retry -- looks like leftover test
            # code; confirm before relying on this path
            print('yes'+self.p.dataname)
            self.lines.datetime[0] = date2num(datetime.datetime(2020,3,3,10,0,0))
            self.lines.open[0] = 4000
            self.lines.high[0] = 4010
            self.lines.low[0] = 3990
            self.lines.close[0] = 4001
            self.lines.volume[0] = 100000
            return True
        else:
            self.conn.close()
            return False
def dopostinit(cls, _obj, *args, **kwargs):
    """Finish data-feed initialization after __init__ has run."""
    _obj, args, kwargs = super(MetaAbstractDataBase, cls).dopostinit(
        _obj, *args, **kwargs)

    p = _obj.p

    # mirror the parameters onto the instance
    _obj._name = p.name
    _obj._compression = p.compression
    _obj._timeframe = p.timeframe

    # default session boundaries: the whole day
    if p.sessionstart is None:
        p.sessionstart = datetime.time(0, 0, 0)
    if p.sessionend is None:
        p.sessionend = datetime.time(23, 59, 59)

    # push it to the end of the day, or else intraday
    # values before the end of the day would be gone
    if isinstance(p.fromdate, datetime.date):
        p.fromdate = datetime.datetime.combine(p.fromdate, p.sessionstart)
    if isinstance(p.todate, datetime.date):
        p.todate = datetime.datetime.combine(p.todate, p.sessionend)

    _obj.fromdate = date2num(p.fromdate)
    _obj.todate = date2num(p.todate)
    _obj.sessionstart = time2num(p.sessionstart)
    _obj.sessionend = time2num(p.sessionend)

    # hold datamaster points corresponding to own
    _obj.mlen = list()

    return _obj, args, kwargs
def dopostinit(cls, _obj, *args, **kwargs):
    """Finish data-feed initialization once __init__ has completed."""
    _obj, args, kwargs = super(MetaAbstractDataBase, cls).dopostinit(
        _obj, *args, **kwargs)

    p = _obj.p

    # mirror parameters onto the instance
    _obj._name = p.name
    _obj._compression = p.compression
    _obj._timeframe = p.timeframe

    # accept full datetimes for the session edges, keep only the time
    if isinstance(p.sessionstart, datetime.datetime):
        p.sessionstart = p.sessionstart.time()
    if p.sessionstart is None:
        p.sessionstart = datetime.time(0, 0, 0)  # default: day start

    if isinstance(p.sessionend, datetime.datetime):
        p.sessionend = p.sessionend.time()
    if p.sessionend is None:
        p.sessionend = datetime.time(23, 59, 59)  # default: day end

    # promote pure dates to datetimes anchored at the session edges,
    # or else intraday values before the end of the day would be gone
    if isinstance(p.fromdate, datetime.date):
        p.fromdate = datetime.datetime.combine(p.fromdate, p.sessionstart)
    if isinstance(p.todate, datetime.date):
        p.todate = datetime.datetime.combine(p.todate, p.sessionend)

    _obj.fromdate = date2num(p.fromdate)
    _obj.todate = date2num(p.todate)
    _obj.sessionstart = time2num(p.sessionstart)
    _obj.sessionend = time2num(p.sessionend)

    # hold datamaster points corresponding to own
    _obj.mlen = list()

    _obj._barstack = collections.deque()  # for filter operations
    _obj._filters = list()
    _obj._ffilters = list()
    for fp in p.filters:
        if inspect.isclass(fp):
            fp = fp(_obj)
        # filters with a ``last`` method also run on stream end
        if hasattr(fp, 'last'):
            _obj._ffilters.append((fp, [], {}))
        _obj._filters.append((fp, [], {}))

    return _obj, args, kwargs
def write_csv(
    self,
    symbol: str,
    timeframe: str,
    compression: int = 1,
    fromdate: datetime = None,
    todate: datetime = None,
) -> None:
    """Request MT5 to write history data to a CSV file.

    Args:
        symbol: instrument name as known to the MT5 terminal
        timeframe: backtrader timeframe identifier
        compression: bar compression for the timeframe
        fromdate: optional start of the requested range (None = open start)
        todate: optional end of the requested range (None = open end)

    Raises:
        ServerConfigError: if the MT5 EA reports an error
    """
    # Normalize the optional range to comparable numbers; +/-inf mark an
    # open-ended range
    if fromdate is None:
        fromdate = float("-inf")
    else:
        fromdate = date2num(fromdate)

    if todate is None:
        todate = float("inf")
    else:
        todate = date2num(todate)

    date_begin = num2date(fromdate) if fromdate > float("-inf") else None
    date_end = num2date(todate) if todate < float("inf") else None

    # Convert to epoch seconds as expected by the EA
    begin = end = None
    if date_begin:
        begin = int((date_begin - self._DTEPOCH).total_seconds())
    if date_end:
        end = int((date_end - self._DTEPOCH).total_seconds())

    tf = self.get_granularity(timeframe, compression)

    if self.debug:
        print(
            "Request CSV write with Fetching: {}, Timeframe: {}, Fromdate: {}"
            .format(symbol, tf, date_begin))

    ret_val = self.oapi.construct_and_send(
        action="HISTORY",
        actionType="WRITE",
        symbol=symbol,
        chartTF=tf,
        fromDate=begin,
        toDate=end,
    )

    if ret_val["error"]:
        print(ret_val)
        raise ServerConfigError(ret_val["description"])
        # self.put_notification(ret_val["description"])
    else:
        # BUGFIX: the original message interpolated ``tf`` twice (instead
        # of the symbol) and misspelled CSV as "CVS"
        self.put_notification(
            f"Request to write CSV data for symbol {symbol} and timeframe "
            f"{tf} succeeded. Check MT5 EA logging for the exact output "
            f"location ...")
def dopostinit(cls, _obj, *args, **kwargs):
    """Complete the feed setup: names, session edges, date range, filters."""
    _obj, args, kwargs = super(MetaAbstractDataBase, cls).dopostinit(
        _obj, *args, **kwargs)

    params = _obj.p

    # copy the naming/timeframe parameters onto the instance
    _obj._name = params.name
    _obj._compression = params.compression
    _obj._timeframe = params.timeframe

    # session start: strip datetimes down to a time, default to 00:00:00
    if isinstance(params.sessionstart, datetime.datetime):
        params.sessionstart = params.sessionstart.time()
    if params.sessionstart is None:
        params.sessionstart = datetime.time(0, 0, 0)

    # session end: strip datetimes down to a time, default to 23:59:59
    if isinstance(params.sessionend, datetime.datetime):
        params.sessionend = params.sessionend.time()
    if params.sessionend is None:
        params.sessionend = datetime.time(23, 59, 59)

    # anchor bare dates at the session edges, or else intraday values
    # before the end of the day would be gone
    if isinstance(params.fromdate, datetime.date):
        params.fromdate = datetime.datetime.combine(
            params.fromdate, params.sessionstart)
    if isinstance(params.todate, datetime.date):
        params.todate = datetime.datetime.combine(
            params.todate, params.sessionend)

    _obj.fromdate = date2num(params.fromdate)
    _obj.todate = date2num(params.todate)
    _obj.sessionstart = time2num(params.sessionstart)
    _obj.sessionend = time2num(params.sessionend)

    # hold datamaster points corresponding to own
    _obj.mlen = list()

    # filter machinery
    _obj._barstack = collections.deque()  # for filter operations
    _obj._filters = list()
    _obj._ffilters = list()
    for fp in params.filters:
        if inspect.isclass(fp):
            fp = fp(_obj)
        if hasattr(fp, 'last'):
            _obj._ffilters.append((fp, [], {}))
        _obj._filters.append((fp, [], {}))

    return _obj, args, kwargs
def _load(self):
    """Deliver the next candle from the iterator; False when exhausted."""
    # If no file, no reading
    if self.full is None:
        return False

    try:
        candle = next(self.itr)
    except StopIteration:
        return False

    self.lines.datetime[0] = bt.date2num(candle[0])

    row = candle[1]
    if row['open'] == 0.0:
        # a zero open marks a missing bar -> blank out the OHLC values
        for line in (self.lines.open, self.lines.high,
                     self.lines.low, self.lines.close):
            line[0] = np.nan
    else:
        self.lines.open[0] = row['open']
        self.lines.high[0] = row['high']
        self.lines.low[0] = row['low']
        self.lines.close[0] = row['close']

    if 'spread' in self.full.columns:
        self.lines.spread[0] = row['spread']

    return True
def _load_history(self, msg):
    """Fill one bar from an Oanda history message; False if already seen."""
    dtobj = datetime.utcfromtimestamp(int(msg['time']) / 10 ** 6)
    dt = date2num(dtobj)
    if dt <= self.lines.datetime[-1]:
        return False  # time already seen

    # Common fields
    self.lines.datetime[0] = dt
    self.lines.volume[0] = float(msg['volume'])
    self.lines.openinterest[0] = 0.0

    # pick the price side: bid/ask when requested, mid otherwise
    if self.p.bidask:
        side = 'Ask' if self.p.useask else 'Bid'
    else:
        side = 'Mid'

    self.lines.open[0] = float(msg['open' + side])
    self.lines.high[0] = float(msg['high' + side])
    self.lines.low[0] = float(msg['low' + side])
    self.lines.close[0] = float(msg['close' + side])

    return True
def _load_candle(self, ohlcv):
    """Fill one bar from an MT5 OHLCV(+spread) tuple; False if stale."""
    time_stamp, _open, _high, _low, _close, _volume, _spread = ohlcv

    # Keep timezone of the MetaTrader trade server, convert to datetime
    d_time = datetime.fromtimestamp(time_stamp)
    dt = date2num(d_time)
    if dt <= self.lines.datetime[-1]:
        return False  # time already seen

    def addspread(p, s):
        # JPY pairs quote with 3 decimals, everything else with 5
        if self.p.dataname.endswith("JPY"):
            return round(float(p) + int(s) * 0.001, 3)
        return round(float(p) + int(s) * 0.00001, 5)

    self.lines.datetime[0] = dt
    if self.p.addspread:
        self.lines.open[0] = addspread(_open, _spread)
        self.lines.high[0] = addspread(_high, _spread)
        self.lines.low[0] = addspread(_low, _spread)
        self.lines.close[0] = addspread(_close, _spread)
    else:
        self.lines.open[0] = _open
        self.lines.high[0] = _high
        self.lines.low[0] = _low
        self.lines.close[0] = _close

    self.lines.volume[0] = _volume
    self.lines.openinterest[0] = 0.0

    return True
def _load_tick(self, msg):
    """Fill a one-price bar from an MT5 (timestamp, bid, ask) tick."""
    time_stamp, _bid, _ask = msg

    # Keep the timezone of the MetaTrader trade server; the unix
    # timestamp carries milliseconds, hence /1000.0
    d_time = datetime.fromtimestamp(float(time_stamp) / 1000.0)
    dt = date2num(d_time)
    if dt <= self.lines.datetime[-1]:
        return False  # time already seen

    # Common fields
    self.lines.datetime[0] = dt
    self.lines.volume[0] = 0.0
    self.lines.openinterest[0] = 0.0

    # one price feeds all four OHLC lines
    tick = float(_ask) if self.p.useask else float(_bid)
    for name in ('open', 'high', 'low', 'close'):
        getattr(self.lines, name)[0] = tick

    return True
def _load(self):
    """Advance the row iterator and fill the lines; False at the end."""
    try:
        row = next(self._rows)
    except StopIteration:
        return False

    # Standard datafields; datetime is handled separately below
    for field in self.datafields[1:]:
        idx = getattr(self.params, field)
        if idx < 0:
            continue  # column not present -- skip
        # indexing for pandas: 1st is column, then row
        getattr(self.lines, field)[0] = row[idx]

    # datetime: convert the pandas timestamp to the internal float format
    dtfield = self.datafields[0]
    dtidx = getattr(self.params, dtfield)
    dtnum = date2num(row[dtidx].to_pydatetime())
    getattr(self.lines, dtfield)[0] = dtnum

    return True
def _load_tick(self, msg):
    """Fill a one-price bar from an Oanda tick; False if already seen.

    All four OHLC lines get the same tick price (bid, ask or mid,
    depending on the ``bidask``/``useask`` parameters).
    """
    dtobj = datetime.utcfromtimestamp(float(msg['time']))
    dt = date2num(dtobj)
    if dt <= self.lines.datetime[-1]:
        return False  # time already seen

    # Common fields (the original assigned volume/openinterest twice;
    # once is enough)
    self.lines.datetime[0] = dt
    self.lines.volume[0] = 0.0
    self.lines.openinterest[0] = 0.0

    # Put the prices into the bar
    if self.p.bidask:
        if self.p.useask:
            tick = float(msg['asks'][0]['price'])
        else:
            tick = float(msg['bids'][0]['price'])
    else:
        # create mid price
        tick = (float(msg['bids'][0]['price']) +
                float(msg['asks'][0]['price'])) / 2

    self.lines.open[0] = tick
    self.lines.high[0] = tick
    self.lines.low[0] = tick
    self.lines.close[0] = tick

    return True
def __init__(self):
    """Open a MySQL connection and prefetch all OHLCV rows for the symbol."""
    self.engine = create_engine(
        'mysql+pymysql://' + self.p.dbUser + ':' + self.p.dbPWD + '@' +
        self.p.dbHost + '/' + self.p.dbName + '?charset=utf8mb4',
        echo=False)
    self.conn = self.engine.connect()

    # NOTE(review): the table name cannot be bound as a parameter; it
    # comes from configuration, not user input.  Ticker and dates are
    # bound as parameters to avoid SQL injection (the original
    # interpolated them directly into the statement via f-string).
    query = (f'SELECT date,open,high,low,close,volume FROM {self.p.table} '
             'WHERE ticker = %s AND date >= %s AND date <= %s '
             'ORDER BY date ASC')
    self.result = self.conn.execute(
        query,
        (self.p.symbol,
         self.p.fromdate.strftime("%Y-%m-%d"),
         self.p.todate.strftime("%Y-%m-%d")))

    print(f'Data AVAILABLE - Collecting data for {self.p.symbol} from mySQL database')
    myresult = self.result.fetchall()
    print(myresult)

    # Convert each row to [date2num(date), open, high, low, close, volume]
    records = []
    for rowproxy in myresult:
        converted = [date2num(rowproxy[0])]
        converted.extend(rowproxy[1:])
        records.append(converted)

    self.mytuple = tuple(records)
    self.counter = 0
def _load_tick(self, msg):
    """Fill a bar from an Oanda v20 tick, incl. bid/ask/mid close lines."""
    dtobj = datetime.utcfromtimestamp(float(msg['time']))
    dt = date2num(dtobj)
    if dt <= self.l.datetime[-1]:
        return False  # time already seen

    # common fields
    self.l.datetime[0] = dt
    self.l.volume[0] = 0.0
    self.l.openinterest[0] = 0.0

    # compute the three candidate prices
    ask = float(msg['asks'][0]['price'])
    bid = float(msg['bids'][0]['price'])
    mid = round((bid + ask) / 2, self.contractdetails['displayPrecision'])
    price = {'ask': ask, 'bid': bid, 'mid': mid}

    # choose which price feeds the OHLC lines
    if self.p.bidask:
        chosen = 'ask' if self.p.useask else 'bid'
    else:
        chosen = 'mid'

    for t in ('open', 'high', 'low', 'close'):
        getattr(self.l, t)[0] = price[chosen]

    # the dedicated *_close lines always carry all three prices
    for x in ('mid', 'bid', 'ask'):
        getattr(self.l, f'{x}_close')[0] = price[x]

    self.l.volume[0] = 0.0
    self.l.openinterest[0] = 0.0

    return True
def _loadline(self, linetokens):
    """Parse one CSV line of tokens: YYYYMMDD, HHMMSS, O, H, L, C, V.

    The leftover debug ``print`` calls of the original were removed.
    """
    itoken = iter(linetokens)

    # date token: YYYYMMDD
    dttxt = next(itoken)
    y = int(dttxt[0:4])
    m = int(dttxt[4:6])
    d = int(dttxt[6:8])

    # time token: HHMMSS
    timee = next(itoken)
    h = int(timee[0:2])
    mi = int(timee[2:4])
    se = int(timee[4:6])

    dt = datetime.datetime(y, m, d, hour=h, minute=mi, second=se)
    self.lines.datetime[0] = bt.date2num(dt)

    self.lines.open[0] = float(next(itoken))
    self.lines.high[0] = float(next(itoken))
    self.lines.low[0] = float(next(itoken))
    self.lines.close[0] = float(next(itoken))
    self.lines.volume[0] = float(next(itoken))
    self.lines.openinterest[0] = 0
    return True
def _load_candle(self, msg):
    """Fill one bar from an Oanda v20 candle message; False if stale."""
    dtobj = datetime.utcfromtimestamp(float(msg['time']))
    dt = date2num(dtobj)
    if dt <= self.lines.datetime[-1]:
        return False  # time already seen

    # Common fields
    self.lines.datetime[0] = dt
    self.lines.volume[0] = float(msg['volume'])
    self.lines.openinterest[0] = 0.0

    # select the candle side: bid/ask when requested, mid otherwise
    if self.p.bidask:
        side = 'ask' if self.p.useask else 'bid'
    else:
        side = 'mid'

    ohlc = msg[side]
    self.lines.open[0] = float(ohlc['o'])
    self.lines.high[0] = float(ohlc['h'])
    self.lines.low[0] = float(ohlc['l'])
    self.lines.close[0] = float(ohlc['c'])

    return True
def _load(self):
    """Deliver the next blaze row; False when the iterator is exhausted."""
    try:
        row = next(self._rows)
    except StopIteration:
        return False

    # standard datafields; datetime handled below
    for field in self.datafields[1:]:
        idx = getattr(self.params, field)
        if idx < 0:
            continue  # column not present
        getattr(self.lines, field)[0] = row[idx]

    # datetime - assumed blaze always serves a native datetime.datetime
    dtfield = self.datafields[0]
    dtidx = getattr(self.params, dtfield)
    getattr(self.lines, dtfield)[0] = date2num(row[dtidx])

    return True
def _load_ohlcv(self, granularity):
    """Fetch fresh OHLCV bars from the exchange and deliver the oldest one."""
    # queue any bars newer than the last seen timestamp
    for ohlcv in self.exchange.fetch_ohlcv(self.symbol, granularity,
                                           limit=self.ohlcv_limit):
        tstamp = ohlcv[0]
        if tstamp > self._last_id:
            self._data.append(ohlcv)
            self._last_id = tstamp

    try:
        ohlcv = self._data.popleft()
    except IndexError:
        return  # no bars in the queue

    tstamp, open_, high, low, close, volume = ohlcv
    dtime = datetime.utcfromtimestamp(tstamp // 1000)

    self.lines.datetime[0] = bt.date2num(dtime)
    self.lines.open[0] = open_
    self.lines.high[0] = high
    self.lines.low[0] = low
    self.lines.close[0] = close
    self.lines.volume[0] = volume

    print("loaded bar time: %s, open: %s, high: %s, low: %s, close: %s, volume: %s" % \
        (dtime.strftime('%Y-%m-%d %H:%M:%S'), open_, high, low, close, volume))

    return True
def _load_ticks(self):
    """Pull new trades from the store and deliver one tick-bar."""
    if self._last_id is None:
        # first time: seed with only the most recent trade
        trades = [self.store.fetch_trades(self.p.dataname)[-1]]
    else:
        trades = self.store.fetch_trades(self.p.dataname)

    # queue every trade newer than the last seen id
    for trade in trades:
        tid = trade['id']
        if tid > self._last_id:
            ttime = datetime.strptime(trade['datetime'],
                                      '%Y-%m-%dT%H:%M:%S.%fZ')
            self._data.append(
                (ttime, float(trade['price']), float(trade['amount'])))
            self._last_id = tid

    try:
        ttime, price, size = self._data.popleft()
    except IndexError:
        return None  # no data in the queue

    self.lines.datetime[0] = bt.date2num(ttime)
    for line in (self.lines.open, self.lines.high,
                 self.lines.low, self.lines.close):
        line[0] = price
    self.lines.volume[0] = size
    return True
def _load(self):
    """Deliver the next row from the tushare result set."""
    if self.iter is None:
        return False  # if no data ... no parsing

    try:
        row = next(self.iter)
    except StopIteration:
        return False  # end of the list

    # trade_date format is YYYYMMDD
    tdate = row['trade_date']
    dt = datetime.datetime(int(tdate[0:4]), int(tdate[4:6]), int(tdate[6:8]))

    self.lines.datetime[0] = date2num(dt)
    self.lines.open[0] = row['open']
    self.lines.high[0] = row['high']
    self.lines.low[0] = row['low']
    self.lines.close[0] = row['close']
    self.lines.volume[0] = row['vol']
    self.lines.openinterest[0] = 0
    return True
def _load(self):
    """Deliver one alert/bar from the shared ``data_queue``.

    Returns None when no data is pending, False on a malformed alert or
    when the feed is over, True after filling the lines.
    """
    if self._state == self._ST_OVER:
        return False

    while True:
        # We won't have data for a while when starting so don't raise
        # an error if we can't find the key.
        try:
            if data_queue[self.p.dataname].empty():
                return None
            data = data_queue[self.p.dataname].get()
            if self.p.debug:
                # BUGFIX: message previously read 'Receieved'
                print('{} Data Received'.format(self.p.dataname))
        except KeyError as e:
            return None

        # Now we have data, process it.
        try:
            if 'DT' in data.keys():
                dtime = datetime.strptime(data['DT'], '%Y-%m-%dT%H:%M:%SZ')
            else:
                dtime = datetime.now()
            self.lines.datetime[0] = bt.date2num(dtime)

            # all OHLCV/signal fields are optional in an alert
            if 'O' in data.keys():
                self.lines.open[0] = data['O']
            if 'H' in data.keys():
                self.lines.high[0] = data['H']
            if 'L' in data.keys():
                self.lines.low[0] = data['L']
            if 'C' in data.keys():
                self.lines.close[0] = data['C']
            if 'V' in data.keys():
                self.lines.volume[0] = data['V']
            if 'action' in data.keys():
                self.lines.signal[0] = data['action']
        except (KeyError, TypeError) as e:
            print('Bad Syntax in alert. Please check')
            print('{}'.format(e))
            print('Data Supplied: {}'.format(data))
            return False

        if self.p.debug:
            print('{} Loaded OHLC Data'.format(self.p.dataname))
        return True
        # NOTE: every path above returns, so the loop runs at most once;
        # the original's trailing unreachable ``return None`` was dropped
def _load(self):
    """Block (up to 1s) for a JSON bar on the global queue and load it.

    Returns True once a bar is loaded, False when the queue stays empty.
    """
    global data_queue
    json_val = None  # last successfully parsed payload (for diagnostics)
    while True:
        try:
            row = data_queue.get(block=True, timeout=1)
            json_val = json.loads(row)
            self.lines.datetime[0] = bt.date2num(
                datetime.datetime.fromtimestamp(json_val["s"] / 1000))
            self.lines.open[0] = json_val["o"]
            self.lines.high[0] = json_val["h"]
            self.lines.low[0] = json_val["l"]
            self.lines.close[0] = json_val["c"]
            self.lines.volume[0] = json_val["v"]
            self.lines.openinterest[0] = 0
            return True
        except queue.Empty:
            return False
        except Exception as e:
            # BUGFIX: the original printed ``json_val`` unconditionally,
            # raising NameError when json.loads itself failed; the value
            # is now pre-initialized, so this handler can't blow up
            print(e)
            print(json_val)
            # retry with the next queued payload
def _load_history(self, msg):
    """Load one historical Oanda bar into the lines; False if stale."""
    dtobj = datetime.utcfromtimestamp(int(msg['time']) / 10**6)
    dt = date2num(dtobj)
    if dt <= self.lines.datetime[-1]:
        return False  # time already seen

    # Common fields
    self.lines.datetime[0] = dt
    self.lines.volume[0] = float(msg['volume'])
    self.lines.openinterest[0] = 0.0

    # choose the quoted side of the candle
    if not self.p.bidask:
        suffix = 'Mid'
    elif self.p.useask:
        suffix = 'Ask'
    else:
        suffix = 'Bid'

    for field in ('open', 'high', 'low', 'close'):
        getattr(self.lines, field)[0] = float(msg[field + suffix])

    return True
def _load(self):
    """Deliver the next row of the pandas dataframe in ``p.dataname``."""
    self._idx += 1

    if self._idx >= len(self.p.dataname):
        return False  # exhausted all rows

    # Set the standard datafields
    for datafield in self.getlinealiases():
        if datafield == 'datetime':
            continue  # handled separately below

        colindex = self._colmapping[datafield]
        if colindex is None:
            # datafield signaled as missing in the stream: skip it
            continue

        line = getattr(self.lines, datafield)

        # indexing for pandas: 1st is column, then row
        value = self.p.dataname.iloc[self._idx, colindex]

        # Some sources deliver numbers as strings, with 'null' marking a
        # missing value (mapped to 0, as before).
        # BUGFIX: use isinstance instead of comparing type(...).__name__
        if isinstance(value, str):
            line[0] = 0 if value == 'null' else float(value)
        else:
            line[0] = value

    # datetime conversion
    coldtime = self._colmapping['datetime']
    if coldtime is None:
        # standard index in the datetime
        tstamp = self.p.dataname.index[self._idx]
    else:
        # it's in a different column ... use standard column index
        tstamp = self.p.dataname.iloc[self._idx, coldtime]

    self.lines.datetime[0] = date2num(parser.parse(tstamp))

    # Done ... return
    return True
def _new_load(self):
    # Patched loader: after a successful load, shift the bar's timestamp
    # back by (timeframe - 1) minutes so it is stamped at the bar's open
    # instead of its close.
    # NOTE(review): ``timeframe`` is a free variable expected in the
    # enclosing/module scope, and the local ``datetime`` shadows the
    # datetime module inside this function -- confirm both are intended.
    ret = self._old_load()
    if ret:
        datetime = self.datetime.datetime(0)
        self.datetime[0] = bt.date2num(
            datetime - timedelta(minutes=timeframe - 1))
    return ret
def _get_start_end(strategy, start, end): st_dtime = strategy.lines.datetime.array if start is None: start = 0 if end is None: end = len(st_dtime) if isinstance(start, datetime.date): start = bisect.bisect_left(st_dtime, bt.date2num(start)) if isinstance(end, datetime.date): end = bisect.bisect_right(st_dtime, bt.date2num(end)) if end < 0: end = len(st_dtime) + 1 + end return start, end
def _load_row(self, row):
    """Copy one pandas row (indexed by timestamp) into the lines."""
    # row.name is the index value, i.e. the bar's timestamp
    self.lines.datetime[0] = date2num(row.name)
    for field in ('open', 'high', 'low', 'close', 'volume'):
        getattr(self.lines, field)[0] = getattr(row, field)
    # 'count' clashes with Series.count(), hence item access
    self.lines.openinterest[0] = row['count']
def _load_history(self, ohlcv):
    """Fill one bar from an (epoch, O, H, L, C, V) tuple; False if stale."""
    time_stamp, _open, _high, _low, _close, _volume = ohlcv
    d_time = datetime.utcfromtimestamp(time_stamp)

    dt = date2num(d_time)
    # time already seen
    if dt <= self.lines.datetime[-1]:
        return False

    # reuse the already-computed value instead of calling date2num twice
    self.lines.datetime[0] = dt
    self.lines.open[0] = _open
    self.lines.high[0] = _high
    self.lines.low[0] = _low
    self.lines.close[0] = _close
    self.lines.volume[0] = _volume
    self.lines.openinterest[0] = 0.0

    return True
def _load(self):
    # Deliver the next bar/notification from the VisualChart queue.
    # Returns None on queue timeout (caller retries), False when the
    # stream has ended, True once a bar has been loaded.
    if self._state == self._ST_NOTFOUND:
        return False  # nothing can be done

    while True:
        try:
            # tmout <> 0 only if resampling/replaying, else no waking up
            tmout = self._qcheck * bool(self.resampling)
            msg = self.q.get(timeout=tmout)
        except queue.Empty:
            return None

        if msg is None:
            return False  # end of stream

        if msg == self.store._RT_SHUTDOWN:
            self.put_notification(self.DISCONNECTED)
            return False  # VC has exited

        if msg == self.store._RT_DISCONNECTED:
            self.put_notification(self.CONNBROKEN)
            continue

        if msg == self.store._RT_CONNECTED:
            self.put_notification(self.CONNECTED)
            self.put_notification(self.DELAYED)
            continue

        if msg == self.store._RT_LIVE:
            # only notify on an actual status change
            if self._laststatus != self.LIVE:
                self.put_notification(self.LIVE)
            continue

        if msg == self.store._RT_DELAYED:
            if self._laststatus != self.DELAYED:
                self.put_notification(self.DELAYED)
            continue

        if isinstance(msg, integer_types):
            # unrecognized numeric status code from the store
            self.put_notification(self.UNKNOWN, msg)
            continue

        # it must be a bar
        bar = msg

        # Put the tick into the bar
        self.lines.open[0] = bar.Open
        self.lines.high[0] = bar.High
        self.lines.low[0] = bar.Low
        self.lines.close[0] = bar.Close
        self.lines.volume[0] = bar.Volume
        self.lines.openinterest[0] = bar.OpenInterest

        # Convert time to "market" time (096 exception)
        dt = self.NULLDATE + timedelta(days=bar.Date) - self._mktoffset
        self.lines.datetime[0] = date2num(dt)

        return True
def get_strategy_start_end(strategy, start, end):
    """Get start and end indices for strategy by given start and end datetimes."""
    dtarray = strategy.lines.datetime.array

    # None means "from the beginning" / "to the end"
    start = 0 if start is None else start
    end = len(dtarray) if end is None else end

    # date/datetime bounds are located via bisection on the float array
    if isinstance(start, datetime.date):
        start = bisect.bisect_left(dtarray, bt.date2num(start))
    if isinstance(end, datetime.date):
        end = bisect.bisect_right(dtarray, bt.date2num(end))

    # a negative end counts back from the last element
    if end < 0:
        end = len(dtarray) + 1 + end

    return start, end
def OnNewCandle(self, data):
    """Handler for a new bar arriving from QUIK."""
    self.jsonBar = None  # reset the current bar
    jsonData = data['data']  # the new bar payload
    # BUGFIX: the original wrote ``int(jsonData['interval'] != self.interval)``,
    # casting the comparison result instead of the interval value
    if jsonData['class'] != self.classCode or jsonData['sec'] != self.secCode \
            or int(jsonData['interval']) != self.interval:
        # bar belongs to another ticker / timeframe
        return

    jsonDateTime = jsonData['datetime']  # composite open date/time of the bar
    dt = datetime(jsonDateTime['year'], jsonDateTime['month'],
                  jsonDateTime['day'], jsonDateTime['hour'],
                  jsonDateTime['min'])
    if date2num(dt) <= self.lines.datetime[-1]:
        # older than (or equal to) the last delivered bar
        return

    self.jsonBar = jsonData  # accept the new bar
def _load(self):
    """Load a historical bar -- or a new subscription bar -- into BackTrader."""
    if self.newCandleSubscribed:  # receiving new bars via subscription
        if self.jsonBar is None:  # the new bar has not arrived yet
            return None  # no bar; we will be called again
    else:  # still receiving historical data
        if len(self.jsonBars) == 0:  # no history (QUIK disconnected from the broker server)
            self.put_notification(self.DISCONNECTED)  # historical bars are done
            return False  # we will not be called again
        if self.barId > self.lastBarId:  # all bars from history consumed
            self.put_notification(self.DISCONNECTED)  # historical bars are done
            if not self.p.LiveBars:  # live bars were not requested
                return False  # we will not be called again
            # Switch over to receiving new bars
            self.jsonBar = None  # drop the last history bar so it is not duplicated as a new bar
            self.store.qpProvider.OnNewCandle = self.OnNewCandle  # new-bar callback; the first call delivers all bars since the start of the previous session
            self.store.qpProvider.SubscribeToCandles(self.classCode, self.secCode, self.interval)  # subscribe to new bars
            self.newCandleSubscribed = True  # now receiving new bars via subscription
            return None  # we will be called again
        else:  # more bars remain in the history
            self.jsonBar = self.jsonBars[self.barId]  # take the next history bar

    # Historical / new bar
    jsonDateTime = self.jsonBar['datetime']  # composite open date/time of the bar
    dt = datetime(jsonDateTime['year'], jsonDateTime['month'], jsonDateTime['day'], jsonDateTime['hour'], jsonDateTime['min'])  # bar open time
    self.lines.datetime[0] = date2num(dt)  # convert to BackTrader's datetime storage format
    self.lines.open[0] = self.store.QKToBTPrice(self.classCode, self.secCode, self.jsonBar['open'])
    self.lines.high[0] = self.store.QKToBTPrice(self.classCode, self.secCode, self.jsonBar['high'])
    self.lines.low[0] = self.store.QKToBTPrice(self.classCode, self.secCode, self.jsonBar['low'])
    self.lines.close[0] = self.store.QKToBTPrice(self.classCode, self.secCode, self.jsonBar['close'])
    self.lines.volume[0] = self.jsonBar['volume']
    self.lines.openinterest[0] = 0  # open interest is not tracked in QUIK

    # Historical bar
    if self.barId <= self.lastBarId:  # history not exhausted yet
        self.barId += 1  # move on to the next bar
        return True  # we will be called again

    # New bar
    timeOpen = self.p.tz.localize(dt)  # exchange time of the bar open
    timeNextClose = timeOpen + timedelta(minutes=self.interval*2)  # exchange time at which the NEXT bar closes
    timeMarketNow = datetime.now(self.p.tz)  # current exchange time
    if not self.lifeMode and timeNextClose > timeMarketNow:  # not live yet, and the next bar closes in the future
        self.put_notification(self.LIVE)  # notify: now delivering live bars
        self.lifeMode = True  # switch to live mode
    # QUIK sometimes stalls for several minutes and then delivers the backlog
    # of missed bars; to avoid trading on history, fall back to DELAYED until
    # a genuinely current bar arrives
    elif self.lifeMode and timeNextClose <= timeMarketNow:  # live, but the next bar would already have closed
        self.put_notification(self.DELAYED)  # notify: delivering historical (non-live) bars
        self.lifeMode = False  # switch back to history mode
    self.jsonBar = None  # reset the current bar
    return True  # we will be called again
def _load_rtbar(self, rtbar):
    """Copy one real-time bar into the lines; always succeeds."""
    # Datetime transformation
    self.lines.datetime[0] = date2num(rtbar.time)

    # Transfer the OHLCV fields one-to-one
    for name in ('open', 'high', 'low', 'close', 'volume'):
        getattr(self.lines, name)[0] = getattr(rtbar, name)
    self.lines.openinterest[0] = 0
    return True
def push_commissionreport(self, cr):
    # Match a TWS commission report to its execution/order, update the
    # position and flag the order for (partial/complete) notification.
    with self._lock_orders:
        ex = self.executions.pop(cr.m_execId)
        oid = ex.m_orderId
        order = self.orderbyid[oid]
        # order status snapshot keyed by cumulative quantity
        ostatus = self.ordstatus[oid].pop(ex.m_cumQty)

        position = self.getposition(order.data, clone=False)
        pprice_orig = position.price
        # sign the size: buys positive, sells negative
        size = ex.m_shares if ex.m_side[0] == 'B' else -ex.m_shares
        price = ex.m_price
        # use pseudoupdate and let the updateportfolio do the real update?
        psize, pprice, opened, closed = position.update(size, price)

        # split commission between closed and opened
        comm = cr.m_commission
        closedcomm = comm * closed / size
        openedcomm = comm - closedcomm

        comminfo = order.comminfo
        closedvalue = comminfo.getoperationcost(closed, pprice_orig)
        openedvalue = comminfo.getoperationcost(opened, price)

        # default in m_pnl is MAXFLOAT
        pnl = cr.m_realizedPNL if closed else 0.0
        # The internal broker calc should yield the same result
        # pnl = comminfo.profitandloss(-closed, pprice_orig, price)

        # Use the actual time provided by the execution object
        # The report from TWS is in actual local time, not the data's tz
        dt = date2num(datetime.strptime(ex.m_time, '%Y%m%d %H:%M:%S'))

        # Need to simulate a margin, but it plays no role, because it is
        # controlled by a real broker. Let's set the price of the item
        margin = order.data.close[0]

        order.execute(dt, size, price, closed, closedvalue, closedcomm,
                      opened, openedvalue, openedcomm, margin, pnl,
                      psize, pprice)

        if ostatus.status == self.FILLED:
            order.completed()
            self.ordstatus.pop(oid)  # nothing left to be reported
        else:
            order.partial()

        if oid not in self.tonotify:  # Lock needed
            self.tonotify.append(oid)
def _load_rtvolume(self, rtvol):
    '''Fill the current bar from a single RTVolume tick.

    The tick price is used for all four price fields. Returns True.
    '''
    lines = self.lines

    # Store the tick timestamp as the internal float datetime
    lines.datetime[0] = date2num(rtvol.datetime)

    # A single trade price fills the whole ohlc set
    price = rtvol.price
    for field in (lines.open, lines.high, lines.low, lines.close):
        field[0] = price

    lines.volume[0] = rtvol.size
    lines.openinterest[0] = 0  # ticks carry no open interest
    return True
def _frombars(self):
    '''Deliver one synthetic bar from the pending fill queue.

    Pops the oldest (datetime, price) pair and writes a flat ohlc bar,
    honoring the fill_price/fill_vol/fill_oi parameters. Returns True.
    '''
    dtime, price = self._fillbars.popleft()

    # A truthy configured fill price overrides the recorded one
    if self.p.fill_price:
        price = self.p.fill_price

    lines = self.lines
    lines.datetime[0] = date2num(dtime)

    for pricefield in (lines.open, lines.high, lines.low, lines.close):
        pricefield[0] = price

    lines.volume[0] = self.p.fill_vol
    lines.openinterest[0] = self.p.fill_oi
    return True
def _load(self):
    '''Read and deliver the next fixed-size binary bar record.

    Returns True when a bar was loaded into the lines, False when the
    file is missing, exhausted, truncated or cannot be unpacked. On any
    terminal condition ``self.f`` is nullified so later calls are cheap.
    '''
    if self.f is None:
        return False  # cannot load more

    try:
        bardata = self.f.read(self._barsize)
    except IOError:
        self.f = None  # cannot return, nullify file
        return False  # cannot load more

    if not bardata or len(bardata) < self._barsize:
        # EOF or a truncated trailing record
        self.f = None  # cannot return, nullify file
        return False  # cannot load more

    try:
        bdata = unpack(self._barfmt, bardata)
    except Exception:
        # was a bare ``except:`` — that also swallowed SystemExit and
        # KeyboardInterrupt; Exception still covers struct.error
        self.f = None
        return False

    # First Date
    y, md = divmod(bdata[0], 500)  # Years stored as if they had 500 days
    m, d = divmod(md, 32)  # Months stored as if they had 32 days
    dt = datetime(y, m, d)

    # Time
    if self._dtsize > 1:  # Minute Bars
        # Daily Time is stored in seconds
        hhmm, ss = divmod(bdata[1], 60)
        hh, mm = divmod(hhmm, 60)
        dt = dt.replace(hour=hh, minute=mm, second=ss)
    else:  # Daily Bars
        dt = datetime.combine(dt, self.p.sessionend)

    self.lines.datetime[0] = date2num(dt)  # Store time

    # Get the rest of the fields
    o, h, l, c, v, oi = bdata[self._dtsize:]
    self.lines.open[0] = o
    self.lines.high[0] = h
    self.lines.low[0] = l
    self.lines.close[0] = c
    self.lines.volume[0] = v
    self.lines.openinterest[0] = oi

    return True  # a bar has been successfully loaded
def _load_rtbar(self, rtbar, hist=False):
    '''Transfer a real-time (or historical) 5-second bar into the lines.

    A complete bar made of real-time ticks carries open/high/low/close/
    volume; historical bars hold the timestamp in ``date`` instead of
    ``time``. Returns False when the bar is not newer than the last one
    delivered and ``latethrough`` is disabled, True when it was loaded.
    '''
    # Pick the proper timestamp attribute and convert it
    stamp = rtbar.date if hist else rtbar.time
    dt = date2num(stamp)

    # Refuse bars that do not advance time, unless explicitly allowed
    if dt <= self.lines.datetime[-1] and not self.p.latethrough:
        return False  # cannot deliver earlier than already delivered

    lines = self.lines
    lines.datetime[0] = dt
    lines.open[0] = rtbar.open
    lines.high[0] = rtbar.high
    lines.low[0] = rtbar.low
    lines.close[0] = rtbar.close
    lines.volume[0] = rtbar.volume
    lines.openinterest[0] = 0  # not provided by the feed
    return True
def _adjusttime(self, index=0): ''' Adjusts the time of calculated bar (from underlying data source) by using the timeframe to the appropriate boundary taken int account compression Depending on param ``rightedge`` uses the starting boundary or the ending one ''' # Get current time tm = self.lines.datetime.time(0) # Get the point of the day in the time frame unit (ex: minute 200) point = self._gettmpoint(tm) # Apply compression to update the point position (comp 5 -> 200 // 5) point = (point // self.p.compression) # If rightedge (end of boundary is activated) add it if point % self.p.compression: point += self.p.rightedge # Restore point to the timeframe units by de-applying compression point *= self.p.compression # Get hours, minutes, seconds and microseconds if self._timeframe == TimeFrame.Minutes: ph, pm = divmod(point, 60) ps = 0 pus = 0 elif self._timeframe == TimeFrame.Seconds: ph, pm = divmod(point, 60 * 60) pm, ps = divmod(pm, 60) pus = 0 elif self._timeframe == TimeFrame.MicroSeconds: ph, pm = divmod(point, 60 * 60 * 1e6) pm, psec = divmod(pm, 60 * 1e6) ps, pus = divmod(psec, 1e6) # Get current datetime value which was taken from data dt = self.lines.datetime.datetime(index) # Replace intraday parts with the calculated ones and update it dt = dt.replace(hour=ph, minute=pm, second=ps, microsecond=pus) self.lines.datetime[0] = date2num(dt)
def _load(self):
    '''Read and deliver the next bar from the open binary file.

    Returns True when a bar was unpacked into the lines, False when
    there is no file or no (complete) record left to read.
    '''
    if self.f is None:
        # if no file ... no parsing
        return False

    # Read the needed amount of binary data
    bardata = self.f.read(self.barsize)
    if not bardata or len(bardata) < self.barsize:
        # nothing left, or only a truncated trailing record which would
        # make struct.unpack raise ... game over say "False"
        return False

    # use struct to unpack the data
    bdata = struct.unpack(self.barfmt, bardata)

    # Years are stored as if they had 500 days
    y, md = divmod(bdata[0], 500)
    # Months are stored as if they had 32 days
    m, d = divmod(md, 32)
    # put y, m, d in a datetime
    dt = datetime.datetime(y, m, d)

    if self.dtsize > 1:  # Minute Bars
        # Daily Time is stored in seconds
        hhmm, ss = divmod(bdata[1], 60)
        hh, mm = divmod(hhmm, 60)
        # add the time to the existing datetime
        dt = dt.replace(hour=hh, minute=mm, second=ss)

    self.lines.datetime[0] = date2num(dt)

    # Get the rest of the unpacked data
    o, h, l, c, v, oi = bdata[self.dtsize:]
    self.lines.open[0] = o
    self.lines.high[0] = h
    self.lines.low[0] = l
    self.lines.close[0] = c
    self.lines.volume[0] = v
    self.lines.openinterest[0] = oi

    # Say success
    return True
def __call__(self, data):
    '''
    Return Values:
      - False: data stream was not touched
      - True: data stream was manipulated (bar timestamp snapped back to
        the containing minute boundary)
    '''
    bardt = data.datetime.datetime()

    # Timestamp truncated down to the minute (seconds/microseconds zeroed)
    floored = datetime(bardt.year, bardt.month, bardt.day,
                       bardt.hour, bardt.minute, 0)

    # How many seconds past the minute boundary the bar actually is
    excess = (bardt - floored).seconds
    if excess > self.p.jitter:
        return False  # too far into the minute - leave bar untouched

    data.datetime[0] = backtrader.date2num(floored)
    return True
def _load_rtvolume(self, rtvol):
    '''Build a full bar out of a single RTVolume tick.

    The single tick price populates the entire ohlc set. Returns False
    when the tick is not newer than the last delivered bar and
    ``latethrough`` is disabled, True when the bar was loaded.
    '''
    # Datetime transformation
    dt = date2num(rtvol.datetime)
    if dt <= self.lines.datetime[-1] and not self.p.latethrough:
        return False  # cannot deliver earlier than already delivered

    lines = self.lines
    lines.datetime[0] = dt

    # One tick is the whole bar
    price = rtvol.price
    for field in (lines.open, lines.high, lines.low, lines.close):
        field[0] = price

    lines.volume[0] = rtvol.size
    lines.openinterest[0] = 0  # ticks carry no open interest
    return True
def _load_tick(self, msg):
    '''Load one tick message into the current bar.

    ``msg['time']`` is an epoch timestamp in microseconds; the chosen
    quote side (ask when ``useask`` else bid) fills all price fields.
    Returns False when the tick's time was already seen, True otherwise.
    '''
    dtobj = datetime.utcfromtimestamp(int(msg['time']) / 10 ** 6)
    dt = date2num(dtobj)
    if dt <= self.lines.datetime[-1]:
        return False  # time already seen

    # Common fields (volume/openinterest were assigned twice in the
    # original; the duplicate statements are removed, behavior unchanged)
    self.lines.datetime[0] = dt
    self.lines.volume[0] = 0.0
    self.lines.openinterest[0] = 0.0

    # Put the prices into the bar
    tick = float(msg['ask']) if self.p.useask else float(msg['bid'])
    self.lines.open[0] = tick
    self.lines.high[0] = tick
    self.lines.low[0] = tick
    self.lines.close[0] = tick

    return True
def _load(self):
    '''Load the next row of the underlying pandas object into the lines.

    Returns False when all rows have been consumed, True otherwise.
    '''
    self._idx += 1

    if self._idx >= len(self.p.dataname):
        # exhausted all rows
        return False

    # Set the standard datafields
    for datafield in self.datafields[1:]:
        colindex = self._colmapping[datafield]
        if colindex is None:
            # datafield signaled as missing in the stream: skip it
            continue

        # get the line to be set
        line = getattr(self.lines, datafield)

        # indexing for pandas: 1st is colum, then row
        # NOTE(review): positional access via dataname[colindex] relies on
        # how _colmapping was built (labels vs positions) — confirm with
        # the feed's __init__; modern pandas may need .iloc here
        line[0] = self.p.dataname[colindex][self._idx]

    # datetime conversion
    coldtime = self._colmapping[self.datafields[0]]

    if coldtime is None:
        # standard index in the datetime
        tstamp = self.p.dataname.index[self._idx]
    else:
        # it's in a different column ... use standard column index
        # NOTE(review): index[coldtime][self._idx] indexes the frame
        # *index*, not a column — verify this is what callers expect
        tstamp = self.p.dataname.index[coldtime][self._idx]

    # convert to float via datetime and store it
    dt = tstamp.to_pydatetime()
    dtnum = date2num(dt)
    self.lines.datetime[0] = dtnum

    # Done ... return
    return True
def _fillbar(self, data, dtime):
    '''Push a synthetic bar for ``dtime`` onto the data's stack.

    Prices are flat (fill_price when truthy, else the latest close);
    volume/open interest come from the fill parameters; any extra feed
    lines keep their current value. Always returns True.
    '''
    # Start from an all-NaN bar of the feed's width
    bar = [float('Nan')] * data.size()

    # Timestamp for the synthetic bar
    bar[data.DateTime] = date2num(dtime)

    # Flat price: configured fill price or the latest seen close
    price = self.p.fill_price or data.close[-1]
    for idx in (data.Open, data.High, data.Low, data.Close):
        bar[idx] = price

    # Volume / open interest come straight from the parameters
    bar[data.Volume] = self.p.fill_vol
    bar[data.OpenInterest] = self.p.fill_oi

    # Extra lines the data feed may have defined beyond DateTime
    for extra in range(data.DateTime + 1, data.size()):
        bar[extra] = data.lines[extra][0]

    # Hand the bar over to the data's stack of pending bars
    data._add2stack(bar)
    return True
def date2num(self, dt):
    '''Convert ``dt`` to the internal float representation, localizing
    it first when a timezone has been configured for this source.
    '''
    if self._tz is None:
        return date2num(dt)

    # Attach the configured timezone before the conversion
    return date2num(self._tz.localize(dt))
def _load(self):
    '''State machine delivering the next bar for the IB live feed.

    Cycles between live data (_ST_LIVE), historical backfilling
    (_ST_HISTORBACK), an auxiliary backfill source (_ST_FROM) and
    startup (_ST_START). Returns True when a bar was loaded, None on a
    live timeout with no data, False when the feed is over/broken.
    '''
    if self.contract is None or self._state == self._ST_OVER:
        return False  # nothing can be done

    while True:
        if self._state == self._ST_LIVE:
            try:
                # a message stashed during reconnection takes precedence
                msg = (self._storedmsg.pop(None, None) or
                       self.qlive.get(timeout=self._qcheck))
            except queue.Empty:
                if True:
                    return None

                # Code invalidated until further checking is done
                # NOTE(review): everything below in this except block is
                # dead code because of the unconditional return above
                if not self._statelivereconn:
                    return None  # indicate timeout situation

                # Awaiting data and nothing came in - fake it up until now
                dtend = self.num2date(date2num(datetime.datetime.utcnow()))
                dtbegin = None
                if len(self) > 1:
                    dtbegin = self.num2date(self.datetime[-1])

                self.qhist = self.ib.reqHistoricalDataEx(
                    contract=self.contract,
                    enddate=dtend, begindate=dtbegin,
                    timeframe=self._timeframe,
                    compression=self._compression,
                    what=self.p.what, useRTH=self.p.useRTH, tz=self._tz,
                    sessionend=self.p.sessionend)

                if self._laststatus != self.DELAYED:
                    self.put_notification(self.DELAYED)

                self._state = self._ST_HISTORBACK
                self._statelivereconn = False
                continue  # to reenter the loop and hit st_historback

            if msg is None:  # Conn broken during historical/backfilling
                self.put_notification(self.CONNBROKEN)
                # Try to reconnect
                if not self.ib.reconnect(resub=True):
                    self.put_notification(self.DISCONNECTED)
                    return False  # failed

                self._statelivereconn = self.p.backfill
                continue

            if msg == -354:
                self.put_notification(self.NOTSUBSCRIBED)
                return False

            elif msg == -1100:  # conn broken
                # Tell to wait for a message to do a backfill
                # self._state = self._ST_DISCONN
                self._statelivereconn = self.p.backfill
                continue

            elif msg == -1102:  # conn broken/restored tickerId maintained
                # The message may be duplicated
                if not self._statelivereconn:
                    self._statelivereconn = self.p.backfill
                continue

            elif msg == -1101:  # conn broken/restored tickerId gone
                # The message may be duplicated
                if not self._statelivereconn:
                    self._statelivereconn = self.p.backfill
                    self.reqdata()  # resubscribe
                continue

            elif isinstance(msg, integer_types):
                # Unexpected notification for historical data skip it
                # May be a "not connected not yet processed"
                self.put_notification(self.UNKNOWN, msg)
                continue

            # Process the message according to expected return type
            if not self._statelivereconn:
                if self._laststatus != self.LIVE:
                    if self.qlive.qsize() <= 1:  # very short live queue
                        self.put_notification(self.LIVE)

                if self._usertvol:
                    ret = self._load_rtvolume(msg)
                else:
                    ret = self._load_rtbar(msg)
                if ret:
                    return True

                # could not load bar ... go and get new one
                continue

            # Fall through to processing reconnect - try to backfill
            self._storedmsg[None] = msg  # keep the msg

            # else do a backfill
            if self._laststatus != self.DELAYED:
                self.put_notification(self.DELAYED)

            dtend = None
            if len(self) > 1:
                # len == 1 ... forwarded for the 1st time
                # get begin date in utc-like format like msg.datetime
                dtbegin = num2date(self.datetime[-1])
            elif self.fromdate > float('-inf'):
                dtbegin = num2date(self.fromdate)
            else:  # 1st bar and no begin set
                # passing None to fetch max possible in 1 request
                dtbegin = None

            dtend = msg.datetime if self._usertvol else msg.time

            self.qhist = self.ib.reqHistoricalDataEx(
                contract=self.contract,
                enddate=dtend, begindate=dtbegin,
                timeframe=self._timeframe,
                compression=self._compression,
                what=self.p.what, useRTH=self.p.useRTH, tz=self._tz,
                sessionend=self.p.sessionend)

            self._state = self._ST_HISTORBACK
            self._statelivereconn = False  # no longer in live
            continue

        elif self._state == self._ST_HISTORBACK:
            msg = self.qhist.get()
            if msg is None:  # Conn broken during historical/backfilling
                # Situation not managed. Simply bail out
                self.put_notification(self.DISCONNECTED)
                return False  # error management cancelled the queue

            elif msg == -354:  # Data not subscribed
                self.put_notification(self.NOTSUBSCRIBED)
                return False

            elif msg == -420:  # No permissions for the data
                self.put_notification(self.NOTSUBSCRIBED)
                return False

            elif isinstance(msg, integer_types):
                # Unexpected notification for historical data skip it
                # May be a "not connected not yet processed"
                self.put_notification(self.UNKNOWN, msg)
                continue

            if msg.date is not None:
                if self._load_rtbar(msg, hist=True):
                    return True  # loading worked

                # the date is from overlapping historical request
                continue

            # End of histdata
            if self.p.historical:  # only historical
                self.put_notification(self.DISCONNECTED)
                return False  # end of historical

            # Live is also wished - go for it
            self._state = self._ST_LIVE
            continue

        elif self._state == self._ST_FROM:
            if not self.p.backfill_from.next():
                # additional data source is consumed
                self._state = self._ST_START
                continue

            # copy lines of the same name
            for alias in self.lines.getlinealiases():
                lsrc = getattr(self.p.backfill_from.lines, alias)
                ldst = getattr(self.lines, alias)

                ldst[0] = lsrc[0]

            return True

        elif self._state == self._ST_START:
            if not self._st_start():
                return False
def load(self):
    '''Main bar-loading loop of the data feed.

    Moves the data pointer forward, fetches a bar (from stack, stash or
    ``_load``), applies the input timezone conversion, the fromdate and
    todate window and the attached filters. Returns True when a bar
    survived, False when the feed is done; may forward ``_load``'s None
    to signal "no bar yet" on live feeds.
    '''
    while True:
        # move data pointer forward for new bar
        self.forward()

        if self._fromstack():  # bar is available
            return True

        if not self._fromstack(stash=True):
            _loadret = self._load()
            if not _loadret:
                # no bar use force to make sure in exactbars
                # the pointer is undone this covers especially (but not
                # uniquely) the case in which the last bar has been seen
                # and a backwards would ruin pointer accounting in the
                # "stop" method of the strategy
                self.backwards(force=True)  # undo data pointer

                # return the actual returned value which may be None to
                # signal no bar is available, but the data feed is not
                # done. False means game over
                return _loadret

        # Get a reference to current loaded time
        dt = self.lines.datetime[0]

        # A bar has been loaded, adapt the time
        if self._tzinput:
            # Input has been converted at face value but it's not UTC in
            # the input stream
            dtime = num2date(dt)  # get it in a naive datetime
            # localize it
            dtime = self._tzinput.localize(dtime)  # pytz compatible-ized
            self.lines.datetime[0] = dt = date2num(dtime)  # keep UTC val

        # Check standard date from/to filters
        if dt < self.fromdate:
            # discard loaded bar and carry on
            self.backwards()
            continue
        if dt > self.todate:
            # discard loaded bar and break out
            self.backwards(force=True)
            break

        # Pass through filters
        retff = False
        for ff, fargs, fkwargs in self._filters:
            # previous filter may have put things onto the stack
            if self._barstack:
                # replay every stacked bar through this filter
                for i in range(len(self._barstack)):
                    self._fromstack(forward=True)

                    retff = ff(self, *fargs, **fkwargs)
            else:
                retff = ff(self, *fargs, **fkwargs)

            if retff:  # bar removed from system
                break  # out of the inner loop

        if retff:  # bar removed from system - loop to get new bar
            continue  # in the greater loop

        # Checks let the bar through ... notify it
        return True

    # Out of the loop ... no more bars or past todate
    return False