def testTimeStampConversions(self):
    """A datetime must survive a round-trip through datetime_to_timestamp."""
    # Naive datetime: convert back without assuming UTC.
    naive = datetime.datetime(2000, 1, 1)
    self.assertEqual(
        dt.timestamp_to_datetime(dt.datetime_to_timestamp(naive), False),
        naive)
    # UTC-aware datetime: convert back as an aware value.
    aware = dt.as_utc(datetime.datetime(2000, 1, 1, 1, 1))
    self.assertEqual(
        dt.timestamp_to_datetime(dt.datetime_to_timestamp(aware), True),
        aware)
def testTimeStampConversionsWithMicroseconds(self):
    """Timestamp round-trips must preserve sub-second (microsecond) precision."""
    # Naive datetime carrying microseconds.
    naive = datetime.datetime(2000, 1, 1, 1, 1, 1, microsecond=10)
    self.assertEqual(
        dt.timestamp_to_datetime(dt.datetime_to_timestamp(naive), False),
        naive)
    # Same value as UTC-aware.
    aware = dt.as_utc(datetime.datetime(2000, 1, 1, 1, 1, 1, microsecond=10))
    self.assertEqual(
        dt.timestamp_to_datetime(dt.datetime_to_timestamp(aware), True),
        aware)
def getBars(self, instrument, frequency, timezone=None, fromDateTime=None, toDateTime=None):
    """Load the bars stored for an instrument at a given frequency.

    :param instrument: instrument name (normalized before querying).
    :param frequency: bar frequency to filter on.
    :param timezone: optional timezone to localize each bar's datetime.
    :param fromDateTime: optional inclusive lower bound for bar timestamps.
    :param toDateTime: optional inclusive upper bound for bar timestamps.
    :return: list of bar.BasicBar, ordered by timestamp ascending.
    """
    instrument = normalize_instrument(instrument)
    sql = "select bar.timestamp, bar.open, bar.high, bar.low, bar.close, bar.volume, bar.adj_close, bar.frequency" \
        " from bar join instrument on (bar.instrument_id = instrument.instrument_id)" \
        " where instrument.name = ? and bar.frequency = ?"
    args = [instrument, frequency]
    # Datetime bounds are stored as numeric timestamps in the db, so convert
    # before appending to the parameter list.
    if fromDateTime is not None:
        sql += " and bar.timestamp >= ?"
        args.append(dt.datetime_to_timestamp(fromDateTime))
    if toDateTime is not None:
        sql += " and bar.timestamp <= ?"
        args.append(dt.datetime_to_timestamp(toDateTime))
    sql += " order by bar.timestamp asc"
    cursor = self.__connection.cursor()
    cursor.execute(sql, args)
    ret = []
    for row in cursor:
        dateTime = dt.timestamp_to_datetime(row[0])
        if timezone:
            dateTime = dt.localize(dateTime, timezone)
        ret.append(
            bar.BasicBar(dateTime, row[1], row[2], row[3], row[4], row[5], row[6], row[7]))
    cursor.close()
    return ret
def getLastBarTimestamp(self):
    """Return the timestamp of the newest bar in the db, as a datetime."""
    cursor = self.__connection.cursor()
    cursor.execute("select max(timestamp) from bar")
    newestRow = cursor.fetchone()
    cursor.close()
    return dt.timestamp_to_datetime(newestRow[0])
def getLastValuesForInstrument(self, instrument, date):
    """Return the most recent bar values for *instrument* at or before *date*.

    :param instrument: instrument name as stored in the instrument table.
    :param date: inclusive upper bound for the bar's timestamp.
    :return: (datetime, open, high, low, close, volume) tuple.
    :raises StopIteration: if the instrument has no bar at or before *date*.
    """
    sql = "select b.timestamp, b.open, b.high, b.low, b.close, b.volume from bar b inner join instrument i on i.instrument_id=b.instrument_id where i.name=? and b.timestamp<=? order by b.timestamp desc"
    cursor = self.__connection.cursor()
    try:
        cursor.execute(sql, [instrument, dt.datetime_to_timestamp(date)])
        # next() works on Python 2.6+ and 3.x; the original cursor.next()
        # is a Python-2-only iterator method.
        ret = next(cursor)
    finally:
        # The original never closed the cursor (unlike the sibling
        # getLastBarTimestamp); release it even if no row was found.
        cursor.close()
    return (dt.timestamp_to_datetime(ret[0]), ret[1], ret[2], ret[3], ret[4], ret[5])
def getBars(self, instrument, frequency, timezone=None, fromDateTime=None, toDateTime=None):
    """Fetch bars for *instrument* at *frequency*, ordered by timestamp.

    :param timezone: if given, each bar's datetime is localized to it.
    :param fromDateTime: optional inclusive lower bound on bar timestamps.
    :param toDateTime: optional inclusive upper bound on bar timestamps.
    :return: list of bar.BasicBar.
    """
    instrument = normalize_instrument(instrument)
    sql = "select bar.timestamp, bar.open, bar.high, bar.low, bar.close, bar.volume, bar.adj_close, bar.frequency" \
        " from bar join instrument on (bar.instrument_id = instrument.instrument_id)" \
        " where instrument.name = ? and bar.frequency = ?"
    args = [instrument, frequency]
    # Bounds are converted to numeric timestamps to match the storage format.
    if fromDateTime is not None:
        sql += " and bar.timestamp >= ?"
        args.append(dt.datetime_to_timestamp(fromDateTime))
    if toDateTime is not None:
        sql += " and bar.timestamp <= ?"
        args.append(dt.datetime_to_timestamp(toDateTime))
    sql += " order by bar.timestamp asc"
    cursor = self.__connection.cursor()
    cursor.execute(sql, args)
    ret = []
    for row in cursor:
        dateTime = dt.timestamp_to_datetime(row[0])
        if timezone:
            dateTime = dt.localize(dateTime, timezone)
        ret.append(bar.BasicBar(dateTime, row[1], row[2], row[3], row[4], row[5], row[6], row[7]))
    cursor.close()
    return ret
def get_slot_datetime(dateTime, frequency):
    """Return the datetime of the last second of the *frequency*-sized slot
    that contains *dateTime*.

    :param dateTime: naive or tz-aware datetime; the result keeps the same
        tzinfo (localized) when *dateTime* is aware.
    :param frequency: slot size in seconds.
    """
    ts = dt.datetime_to_timestamp(dateTime)
    # Use explicit floor division: with plain "/" a float timestamp (or
    # Python 3 true division) yields a fractional slot and breaks the
    # end-of-slot arithmetic below. The sibling range class does
    # int(ts / frequency) for the same reason.
    slot = int(ts // frequency)
    slotTs = (slot + 1) * frequency - 1
    ret = dt.timestamp_to_datetime(slotTs, False)
    if not dt.datetime_is_naive(dateTime):
        ret = dt.localize(ret, dateTime.tzinfo)
    return ret
def __init__(self, barDict, frequency):
    """Build a live bar from a raw bar dict.

    :param barDict: mapping with "Timestamp" (epoch timestamp — presumably
        seconds, since it is fed to dt.timestamp_to_datetime; TODO confirm),
        "Open", "High", "Low", "Close" and "Volume" keys.
    :param frequency: bar frequency forwarded to the base class.
    """
    # Also keep a local-time representation of the bar's timestamp.
    self.__DateTimeLocal = liveUtils.timestamp_to_DateTimeLocal(
        barDict["Timestamp"])
    # NOTE(review): the None argument presumably stands for "no adjusted
    # close" — verify against the base bar constructor.
    super(liveBar, self).__init__(dt.timestamp_to_datetime(barDict["Timestamp"]),
                                  barDict["Open"],
                                  barDict["High"],
                                  barDict["Low"],
                                  barDict["Close"],
                                  barDict["Volume"],
                                  None,
                                  frequency)
def getBars(self, instrument, timezone=None, fromDateTime=None, toDateTime=None):
    """Build daily Bar data for *instrument* from the "stockDB" mongo collection.

    Also caches the loaded values as a pandas DataFrame in self.__df.

    Parameters
    ----------
    instrument : string
        Stock code.
    timezone : string
    fromDateTime : string
    toDateTime : string
        NOTE(review): these three parameters are accepted but never used.

    Returns
    -------
    list of bar.BasicBar
    """
    emg = emongo()
    stockdb = emg.getCollectionNames("stockDB")
    ret = []
    KL = []
    print instrument
    # Collect every k-line list stored under this instrument's key.
    for post in stockdb.find({instrument: {'$exists': 1}}, {instrument: 1, '_id': 0}):
        KL = KL + post[instrument]
    Date = []
    Open = []
    High = []
    Low = []
    Close = []
    Volume = []
    Adj_Close = []
    for val in KL:
        dateTime = val['date']
        # Keep only the YYYY-MM-DD part of the date string.
        dateTime = dateTime[:10]
        # print dateTime
        TimeStamp = time.mktime(time.strptime(dateTime, '%Y-%m-%d'))
        OdateTime = dt.timestamp_to_datetime(TimeStamp)
        # print("%s"%dateTimes)
        # Some records come wrapped in a single-element list.
        if type(val) is list:
            val = val[0]
        # Close is reused as the adjusted close.
        ret.append(bar.BasicBar(OdateTime, val['open'], val['high'], val['low'], val['close'], val['volume'], val['close'], bar.Frequency.DAY))
        Date.append(TimeStamp)
        Open.append(val['open'])
        High.append(val['high'])
        Low.append(val['low'])
        Close.append(val['close'])
        Volume.append(val['volume'])
        Adj_Close.append(val['close'])
    self.__df = pd.DataFrame({'Date': Date, 'Open': Open, 'High': High, 'Close': Close, 'Low': Low, 'Volume': Volume, 'Adj Close': Adj_Close})
    emg.Close()
    return ret
def loadOrders(self, username):
    """Load every persisted order belonging to *username*.

    :return: dict mapping order id (column 0) to the rebuilt order object.
    """
    cursor = self.__connection.cursor()
    cursor.execute(self.__LOAD_ORDERS_SQL, [username])
    loaded = {}
    for row in cursor:
        orderId = row[0]
        # Column 12 holds the order's timestamp; every other column is
        # passed straight through to the order factory.
        loaded[orderId] = self.__createOrder(
            orderId, row[1], row[2], row[3], row[4], row[5], row[6], row[7],
            row[8], row[9], row[10], row[11],
            dt.timestamp_to_datetime(row[12]), row[13], row[14])
    cursor.close()
    return loaded
def __init__(self, dateTime, frequency):
    """Compute the [begin, end) intra-day slot containing *dateTime*.

    :param dateTime: naive or tz-aware datetime.
    :param frequency: slot size in seconds; must be an intra-day size.
    """
    assert isinstance(frequency, int)
    assert frequency > 1
    assert frequency < bar.Frequency.DAY

    # Snap the timestamp down to the start of its frequency-sized slot.
    ts = int(dt.datetime_to_timestamp(dateTime))
    slot = int(ts / frequency)
    slotTs = slot * frequency
    # Second argument: build an aware datetime when the input was aware
    # (per the test-suite usage, True means a UTC-aware result), then
    # re-localize to the input's own tzinfo.
    self.__begin = dt.timestamp_to_datetime(slotTs, not dt.datetime_is_naive(dateTime))
    if not dt.datetime_is_naive(dateTime):
        self.__begin = dt.localize(self.__begin, dateTime.tzinfo)
    self.__end = self.__begin + datetime.timedelta(seconds=frequency)
def parseBar(self, csvRowDict):
    """Turn one parsed CSV row dict into a TradeBar."""
    unixTime = int(csvRowDict["unixtime"])
    tradePrice = float(csvRowDict["price"])
    tradeAmount = float(csvRowDict["amount"])
    # Convert the epoch time and apply the unix-time fixup.
    tradeTime = self.__unixTimeFix.fixDateTime(dt.timestamp_to_datetime(unixTime))
    if self.__timezone:
        # A timezone was configured; localize the trade's datetime.
        tradeTime = dt.localize(tradeTime, self.__timezone)
    return TradeBar(tradeTime, tradePrice, tradeAmount)
def __init__(self, dateTime, frequency):
    """Set up the [begin, end) window for an intra-day range.

    :param dateTime: naive or tz-aware datetime marking the range start.
    :param frequency: range size in seconds; must be an intra-day size.
    """
    super(IntraDayRange, self).__init__()
    assert isinstance(frequency, int)
    assert frequency > 1
    assert frequency < bar.Frequency.DAY

    ts = int(dt.datetime_to_timestamp(dateTime))
    # Slot-snapping intentionally disabled — counting starts from minute 31:
    # slot = ts / frequency
    # slotTs = slot * frequency
    self.__begin = dt.timestamp_to_datetime(
        ts, not dt.datetime_is_naive(dateTime))
    if not dt.datetime_is_naive(dateTime):
        self.__begin = dt.localize(self.__begin, dateTime.tzinfo)
    # Cut the range one minute short: stop when the last bar's data is fetched.
    self.__end = self.__begin + \
        datetime.timedelta(seconds=(frequency - bar.Frequency.MINUTE))
    # Flag for the first "nearly finished" check.
    self.isFirtstCheckingNearly = True
def getBars(self, instrument, frequency, timezone=None, fromDateTime=None, toDateTime=None):
    """Load bars for *instrument* at *frequency* from the MySQL store.

    :param timezone: optional timezone to localize each bar's datetime.
    :param fromDateTime: optional inclusive lower bound on bar timestamps.
    :param toDateTime: optional inclusive upper bound on bar timestamps.
    :return: list of bar.BasicBar.
    :raises ValueError: for frequencies other than DAY or MINUTE.
    """
    instrument = normalize_instrument(instrument)
    instrumentId = self.__getInstrumentID(instrument)
    args = [instrumentId]
    # Pick the table/columns for the requested frequency. Compare with ==
    # rather than "is": identity comparison of ints only appears to work
    # because of CPython small-int caching and is not a reliable test.
    if frequency == bar.Frequency.DAY:
        sql = self.__get_day_bar_sql()
    elif frequency == bar.Frequency.MINUTE:
        min_table_id = self.__get_minute_table_id()
        sql = self.__get_minute_bar_sql(min_table_id)
    else:
        # The original fell through with `sql` unbound (NameError); fail
        # with a clear message instead.
        raise ValueError("unsupported frequency: %s" % (frequency,))
    # Optional inclusive timestamp range.
    if fromDateTime is not None:
        sql += ' and timestamp >= %d'
        args.append(dt.datetime_to_timestamp(fromDateTime))
    if toDateTime is not None:
        sql += ' and timestamp <= %d'
        args.append(dt.datetime_to_timestamp(toDateTime))
    sql += ' order by timestamp'
    # NOTE(review): values are %-substituted into the SQL text. They are
    # numeric ids/timestamps here, but a parameterized query would be safer.
    sql = sql % tuple(args)
    ret = []
    result = self.mysql.fetch(sql)
    for row in result:
        dateTime = dt.timestamp_to_datetime(row[0])
        if timezone:
            dateTime = dt.localize(dateTime, timezone)
        if frequency == bar.Frequency.MINUTE:
            # Minute rows pack open/close and high/low pairs into single columns.
            o, c = self.__split(row[1])
            h, l = self.__split(row[2])
            v = row[3]
        else:
            o, h, l, c, v = row[1], row[2], row[3], row[4], row[5]
        # Close doubles as the adjusted close.
        ret.append(bar.BasicBar(dateTime, o, h, l, c, v, c, frequency))
    return ret
def run_strategy_multipleinstruments(amount, stdate, enddate):
    """Backtest MultiInstrumentStrategy over AAPL/MSFT/GS bars read from redis.

    :param amount: starting cash for the strategy.
    :param stdate: start date (a date/datetime) for the bars to load.
    :param enddate: end date for the bars to load.
    :return: StrategyResults wrapping the executed strategy and its
        returns analyzer.
    """
    from pyalgotrade import plotter
    # Redis sorted-set scores are millisecond timestamps, hence the *1000
    # on the bounds and the /1000 when reading them back.
    seconds = mktime(stdate.timetuple())
    seconds2 = mktime(enddate.timetuple())
    redis_url = os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379')
    url = urlparse.urlparse(redis_url)
    redisConn = redis.StrictRedis(host=url.hostname, port=url.port, password=url.password)
    tickerList = ['AAPL', 'MSFT', 'GS']
    # Initialize the feed and load one bar series per ticker.
    feed = Feed(Frequency.TRADE, 3000)
    for ticker in range(len(tickerList)):
        redis_data = redisConn.zrangebyscore(tickerList[ticker]+':Adj. Close', int(seconds*1000), int(seconds2*1000), 0, -1, True)
        bd = []
        for x in range(len(redis_data)):
            v = float(redis_data[x][0])
            dateTime = dt.timestamp_to_datetime(redis_data[x][1]/1000)
            # Only adjusted close is stored, so it is used for every OHLC
            # field; volume is a constant placeholder.
            bar = BasicBar(dateTime, v, v, v, v, 200000, v, Frequency.DAY)
            bd.append(bar)
        print tickerList[ticker], len(bd)
        feed.loadBars(tickerList[ticker], bd)
    # Evaluate the strategy with the feed.
    #myStrategy = MyStrategy(feed, ticker, amount, 20)
    myStrategy = MultiInstrumentStrategy(feed, tickerList, amount, 20)
    # Attach a returns analyzer to the strategy.
    returnsAnalyzer = returns.Returns()
    results = StrategyResults(myStrategy, returnsAnalyzer)
    #plt = plotter.StrategyPlotter(myStrategy)
    # Plot the simple returns on each bar.
    #plt.getOrCreateSubplot("returns").addDataSeries("Cumulative returns", returnsAnalyzer.getCumulativeReturns())
    # Plot the strategy.
    #plt.plot()
    myStrategy.run()
    print "Final portfolio value: $%.2f" % myStrategy.getBroker().getEquity()
    return results;
def get_time_stamp_info(self, time_stamp, timezone=''):
    """Convert *time_stamp* into a datetime plus its formatted string.

    Accepts either a numeric epoch timestamp in milliseconds or an
    ISO-8601 string ("%Y-%m-%dT%H:%M:%S", optionally with a ".%fZ" suffix).

    :param time_stamp: epoch milliseconds (number) or ISO-8601 string.
    :param timezone: optional timezone used to localize the numeric result.
    :return: (datetime, "%Y-%m-%d %H:%M:%S" string) tuple.
    """
    try:
        # Numeric path: treat the value as epoch milliseconds.
        dateTime = dt.timestamp_to_datetime(time_stamp // 1000)
        if timezone:
            dateTime = dt.localize(dateTime, timezone)
        strDateTime = dateTime.strftime("%Y-%m-%d %H:%M:%S")
    except Exception as e:
        log.debug("时间戳转换失败: {}".format(e))
        # String path: try without, then with, fractional seconds.
        try:
            dateTime = datetime.datetime.strptime(time_stamp, "%Y-%m-%dT%H:%M:%S")
        except ValueError:
            # Narrowed from a bare except: strptime signals a format
            # mismatch with ValueError; anything else should propagate.
            dateTime = datetime.datetime.strptime(time_stamp, "%Y-%m-%dT%H:%M:%S.%fZ")
        strDateTime = dateTime.strftime("%Y-%m-%d %H:%M:%S")
    return dateTime, strDateTime
def getBars(self, instrument, timezone=None, fromDateTime=None, toDateTime=None):
    """Build daily bars for *instrument* from its "_hfq" (post-adjusted) k-line data.

    NOTE(review): only the most recent 300 rows are used, and the
    timezone/fromDateTime/toDateTime parameters are accepted but ignored.
    The raw frame is cached in self.__df.

    :return: list of bar.BasicBar at daily frequency.
    """
    kp = kPrice()
    kline = kp.getAllKLine(instrument + "_hfq")
    # Keep only the latest 300 rows.
    kline = kline.tail(300)
    ret = []
    for row in kline.itertuples():
        dateTime = row.date
        # row.date is a 'YYYY-MM-DD' string; convert to an epoch timestamp.
        TimeStamp = time.mktime(time.strptime(dateTime, '%Y-%m-%d'))
        OdateTime = dt.timestamp_to_datetime(TimeStamp)
        #print float(row.high)
        #print "close:%s adjclose:%s high:%s low:%s date:%s "%(row.close, row.AdjClose,row.high,row.low, row.date)
        # Close doubles as the adjusted close.
        ret.append(
            bar.BasicBar(OdateTime, row.open, row.high, row.low, row.close, row.volume, row.close, bar.Frequency.DAY))
    self.__df = kline
    return ret
# NOTE(review): this function's source appears corrupted — the redis URL
# literal was credential-masked ('redis://*****:*****@@@@') and the masking
# swallowed the following statements into the string, so the text below is
# not valid Python as-is. Preserved verbatim; recover the original from
# version control before editing.
def run_strategy_redis(ticker, amount, stdate, enddate): from pyalgotrade import plotter seconds = mktime(stdate.timetuple()) seconds2 = mktime(enddate.timetuple()) redis_url = os.environ.get('REDISCLOUD_URL', 'redis://*****:*****@@@@", Adj_Close, Adj_Open, Adj_High, Adj_Low, Adj_Volume, dateTime #print key, Adj_Close, Adj_Open, Adj_High, Adj_Low, Adj_Volume, dateTime bar = BasicBar(dateTime, Adj_Open , Adj_High, Adj_Low, Adj_Close, Adj_Volume, Adj_Close, Frequency.TRADE) bd.append(bar) ''' bd = [] Adj_Open, Adj_High, Adj_Low, Adj_Close, Adj_Volume, dt_millisec, dateTime = [] for x in range(len(redis_Adj_Close)): dt_millisec.append(redis_Adj_Close[x][1]) dateTime.append(dt.timestamp_to_datetime(redis_Adj_Close[x][1]/1000)) Adj_Close.append(float(redis_Adj_Close[x][0])) for j in range(len(dt_millisec)): dateTime = dt.timestamp_to_datetime(redis_Adj_Open[x][1]/1000) Adj_Open = float(redis_Adj_Open[x][0]) Adj_High = float(redis_Adj_High[x][0]) Adj_Low = float(redis_Adj_Low[x][0]) Adj_Close = float(redis_Adj_Close[x][0]) Adj_Volume = float(redis_Adj_Volume[x][0]) print dateTime, redis_Adj_Close[x][1]/1000, Adj_Open, Adj_High, Adj_Low, Adj_Close, Adj_Volume bar = BasicBar(dateTime, Adj_Open , Adj_High, Adj_Low, Adj_Close, Adj_Volume, Adj_Close, Frequency.TRADE) bd.append(bar) ''' feed = Feed(Frequency.DAY, 3000) feed.loadBars(ticker, bd) # Evaluate the strategy with the feed. #myStrategy = MyStrategy(feed, ticker, amount, 20) myStrategy = SMACrossOver(feed, ticker, amount, 18) # Attach a returns analyzers to the strategy. returnsAnalyzer = returns.Returns() results = StrategyResults(myStrategy, returnsAnalyzer) #plt = plotter.StrategyPlotter(myStrategy) # Plot the simple returns on each bar. #plt.getOrCreateSubplot("returns").addDataSeries("Cumulative returns", returnsAnalyzer.getCumulativeReturns()) # Plot the strategy. #plt.plot() myStrategy.run() print "Final portfolio value: $%.2f" % myStrategy.getBroker().getEquity() return results;
seconds2 = calendar.timegm(enddate.timetuple()) data_dict = {} try: redisConn = util.get_redis_conn() ### added EOD as data source ticker_data = redisConn.zrangebyscore(ticker + ":EODRAW", int(seconds), int(seconds2), 0, -1, True) data_dict = redis_listoflists_to_dict(ticker_data) except Exception,e: print str(e) pass bd = [] ##### initialize bar data..... for key in data_dict: #dateTime = dt.timestamp_to_datetime(key) dateTime = dt.timestamp_to_datetime(key).replace(tzinfo=None) data = data_dict[key].split("|") ### split pipe delimted values bar = xiQuantBasicBar(dateTime, float(data[0]) , float(data[1]), float(data[2]), float(data[3]), float(data[4]), float(data[5]), Frequency.DAY,float(data[6]), float(data[7])) bd.append(bar) #feed = Feed(Frequency.DAY, 1024) feed.loadBars(ticker, bd) return feed ''' def add_feeds_EODRAW_CSV(feed, ticker, stdate, enddate): import datetime from pyalgotrade.utils import dt from pyalgotrade.bar import BasicBar, Frequency import csv import dateutil.parser
def getDateTime(self):
    """Returns the :class:`datetime.datetime` when this event was generated."""
    microseconds = int(self.getData()["microtimestamp"])
    # The event carries a microsecond-resolution timestamp; scale to seconds.
    return dt.timestamp_to_datetime(microseconds / 1e6)
data_dict = {} ordered_data_dict = None try: redisConn = util.get_redis_conn() ### added EOD as data source ticker_data = redisConn.zrangebyscore(ticker + ":EODRAW", int(seconds), int(seconds2), 0, -1, True) data_dict = xiQuantStrategyUtil.redis_listoflists_to_dict(ticker_data) ordered_data_dict = collections.OrderedDict(sorted(data_dict.items(), reverse=False)) except Exception,e: print str(e) pass bd = [] ##### initialize bar data..... if ordered_data_dict is not None: for key in ordered_data_dict: dateTime = dt.timestamp_to_datetime(key).strftime('%m/%d/%Y') data = data_dict[key].split("|") ### split pipe delimted values dataList = [] dataList.append(ticker) dataList.append(str(dateTime)) dataList.append(float("{0:.2f}".format(float(data[0])))) dataList.append(float("{0:.2f}".format(float(data[1])))) dataList.append(float("{0:.2f}".format(float(data[2])))) dataList.append(float("{0:.2f}".format(float(data[3])))) dataList.append(float("{0:.2f}".format(float(data[4])))) dataList.append(float("{0:.2f}".format(float(data[5])))) dataList.append(float("{0:.2f}".format(float(data[6])))) dataList.append(float("{0:.2f}".format(float(data[7])))) bd.append(dataList) with open(ticker+'_EODRAW.csv', 'w') as fp:
def getDateTime(self):
    """Return this object's creation time as a datetime."""
    createdAt = int(self.__obj['created-at'])
    # 'created-at' is divided by 1000, so it is presumably epoch milliseconds.
    return dt.timestamp_to_datetime(createdAt / 1000)
def tid_to_datetime(tid):
    """Convert a trade id into a datetime.

    The id is scaled down by 1e6, i.e. it is presumably a microsecond-
    resolution epoch timestamp.
    """
    return dt.timestamp_to_datetime(int(tid) / 1000000.0)
def testTimeStampConversionsWithMicroseconds(self):
    """Round-trip a microsecond-carrying datetime, naive then UTC-aware."""
    for wantAware in (False, True):
        original = datetime.datetime(2000, 1, 1, 1, 1, 1, microsecond=10)
        if wantAware:
            original = dt.as_utc(original)
        stamp = dt.datetime_to_timestamp(original)
        self.assertEqual(dt.timestamp_to_datetime(stamp, wantAware), original)
def testTimeStampConversions(self):
    """Naive and UTC-aware datetimes must survive a timestamp round-trip."""
    naive = datetime.datetime(2000, 1, 1)
    restored = dt.timestamp_to_datetime(dt.datetime_to_timestamp(naive), False)
    self.assertEqual(restored, naive)
    aware = dt.as_utc(datetime.datetime(2000, 1, 1, 1, 1))
    restored = dt.timestamp_to_datetime(dt.datetime_to_timestamp(aware), True)
    self.assertEqual(restored, aware)