import gc
import gzip
import logging
import os
from datetime import datetime, timedelta

from tqsdk import TqApi, TqSim
from tqsdk.tools import DataDownloader

# Project-local names used below (TICK_PATH, agg_future_dayk, get_trading_calendar)
# are assumed to be defined or imported elsewhere in the project.


def scrawl_single_tick(i, path, ex, tdates):
    # i is a (file_stem, symbol, trade_date) tuple; tdates maps each trading day
    # to the previous trading day so the download window starts at the night session.
    the_dir1 = os.path.join(path, ex.upper(), str(i[2].year))
    if not os.path.exists(the_dir1):
        os.makedirs(the_dir1)
    the_dir = os.path.join(path, ex.upper(), str(i[2].year), i[0] + ".csv.gz")
    the_dir2 = os.path.join(path, ex.upper(), str(i[2].year), i[0] + ".csv")
    if not os.path.exists(the_dir):
        print(the_dir)
        print(i)
        print(tdates[i[2]])
        print(i[2])
        api = TqApi(account=TqSim())
        # api = TqApi(account=TqSim(), url="ws://192.168.56.1:7777")
        td = DataDownloader(api,
                            symbol_list=[ex.upper() + "." + i[1]],
                            dur_sec=0,
                            start_dt=tdates[i[2]] + timedelta(hours=17),
                            end_dt=i[2] + timedelta(hours=16),
                            csv_file_name=the_dir2)
        while not td.is_finished():
            api.wait_update()
            # print("progress: tick:%.2f%%" % td.get_progress())
        print("done:" + the_dir)
        api.close()
        # Compress the downloaded csv into csv.gz, then remove the plain csv.
        with open(the_dir2, 'rb') as f:
            with gzip.GzipFile(filename=the_dir2 + ".gz", mode='w', compresslevel=9) as gf:
                content = f.read()
                gf.write(content)
        os.remove(the_dir2)
        del td
        del api
        gc.collect()
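# Hedged usage sketch (hypothetical symbol and dates, not part of the original
# script): scrawl_single_tick expects a (file_stem, symbol, trade_date) tuple,
# the tick root path (TICK_PATH is the project-level root used further below),
# the exchange code, and a dict mapping each trading day to the previous one.
def _example_single_tick_call():
    trade_date = datetime(2019, 1, 3)               # hypothetical trading day
    prev_day = {trade_date: datetime(2019, 1, 2)}   # previous-trading-day mapping
    item = (trade_date.strftime('%Y%m%d') + "-rb1905", "rb1905", trade_date)
    scrawl_single_tick(item, TICK_PATH, "shfe", prev_day)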
def scrawl_day_tick(date, ex):
    agg = agg_future_dayk()
    logging.info("start filter existed symbols")
    path = TICK_PATH
    logging.info("start getting tick data")
    api = TqApi(account=TqSim())
    logging.info(ex + ": start getting tick")
    currentYearData = agg.getCurrentYearData(ex)
    currentYearData = currentYearData[currentYearData['date'] == date]
    # Build (file_stem, symbol, trade_date) tuples for every contract traded on `date`.
    pathpair = list(
        map(
            lambda x: (x[1].strftime('%Y%m%d') + "-" + x[0],
                       x[0],
                       datetime.utcfromtimestamp(x[1].timestamp())),
            currentYearData[['symbol', 'date']].values))
    trading_dates = get_trading_calendar(security_type="future", exchange="shfe")
    # Map each trading day to the previous trading day (the download window
    # starts at the prior day's night session).
    tdates = {}
    for i in range(len(trading_dates)):
        if i > 0:
            tdates[datetime.strptime(trading_dates[i], '%Y%m%d')] = datetime.strptime(
                trading_dates[i - 1], '%Y%m%d')
    for i in pathpair:
        # Skip INE crude oil ("sc") contracts.
        if i[1].startswith("sc"):
            continue
        the_dir1 = os.path.join(path, ex.upper(), str(i[2].year))
        if not os.path.exists(the_dir1):
            os.makedirs(the_dir1)
        the_dir = os.path.join(path, ex.upper(), str(i[2].year), i[0] + ".csv.gz")
        the_dir2 = os.path.join(path, ex.upper(), str(i[2].year), i[0] + ".csv")
        # print(the_dir)
        if not os.path.exists(the_dir):
            td = DataDownloader(api,
                                symbol_list=[ex.upper() + "." + i[1]],
                                dur_sec=0,
                                start_dt=tdates[i[2]] + timedelta(hours=17),
                                end_dt=i[2] + timedelta(hours=15),
                                csv_file_name=the_dir2)
            while not td.is_finished():
                api.wait_update()
                # print("progress: tick:%.2f%%" % td.get_progress())
            print("done:" + the_dir)
    # Close the API connection once all contracts for the day are downloaded.
    api.close()
    logging.info(ex + ": complete getting tick")
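# Hedged driver sketch (hypothetical dates): fetch one day of SHFE tick data per
# call; scrawl_day_tick itself skips contracts whose csv.gz already exists on disk.
def _example_day_tick_run():
    for d in [datetime(2019, 1, 2), datetime(2019, 1, 3)]:
        scrawl_day_tick(d, "shfe")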
from tqsdk import TqApi
from tqsdk.lib import TargetPosTask

'''
Go short after 3 consecutive down (bearish) candles, go long after 3 consecutive
up (bullish) candles, otherwise hold no position.
'''

api = TqApi("SIM")
# Number of consecutive up/down candles required
length = 3
# Get a reference to the rb1901 10-second kline serial, with data_length = length + 1
klines = api.get_kline_serial("SHFE.rb1901", 10, data_length=length + 1)
# Create a target-position task for rb1901; the task adjusts the rb1901 position
# to the given target. See the docs for how offset_priority works.
target_pos = TargetPosTask(api, "SHFE.rb1901", offset_priority="今昨开")

while True:
    api.wait_update()
    # Evaluate the entry/exit condition only when a new kline bar has been created
    if api.is_changing(klines[-1], "datetime"):
        # Convert the klines to a pandas.DataFrame, skipping the last, still-forming bar
        df = klines.to_dataframe()[:-1]
        # Compare close against open to decide whether each bar is up or down.
        # df["close"] is the close series, df["open"] the open series;
        # ">" (pandas.Series.gt) yields a boolean series of close > open.
        up = df["close"] > df["open"]
        down = df["close"] < df["open"]
        if all(up):
            print("Consecutive up candles: target position long 1 lot")
            # A positive target means long, negative means short, 0 means flat
            target_pos.set_target_volume(1)
        elif all(down):
            print("Consecutive down candles: target position short 1 lot")
            target_pos.set_target_volume(-1)
        else:
            # Neither streak holds: go flat, as described in the docstring above
            print("No streak: target position flat")
            target_pos.set_target_volume(0)
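# A minimal, standalone check (hypothetical numbers; run it on its own, since the
# loop above never returns) of the candle condition used by the strategy:
# all(close > open) is True only when every bar in the window is an up candle,
# which is what triggers the long entry.
import pandas as pd

_demo = pd.DataFrame({"open": [100.0, 101.0, 102.0],
                      "close": [101.0, 102.0, 103.0]})   # three up candles
assert all(_demo["close"] > _demo["open"])      # long condition holds
assert not all(_demo["close"] < _demo["open"])  # short condition does not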