def get_benchmark_data(benchmark, start_date, end_date):
    if Settings.data_source == DataSource.DXDataCenter:
        benchmark_data = api.GetIndexBarEOD(instrumentIDList=benchmark,
                                            startDate=start_date,
                                            endDate=end_date,
                                            field=['closePrice'])
    elif Settings.data_source == DataSource.DataYes:
        import os
        import tushare as ts
        try:
            ts.set_token(os.environ['DATAYES_TOKEN'])
        except KeyError:
            raise
        mt = ts.Market()
        benchmark_data = mt.MktIdxd(benchmark,
                                    beginDate=start_date.replace('-', ''),
                                    endDate=end_date.replace('-', ''),
                                    field='closeIndex,tradeDate')
        benchmark_data = benchmark_data.set_index('tradeDate')
        benchmark_data = benchmark_data.rename(columns={'closeIndex': 'closePrice'})
        benchmark_data.index = pd.to_datetime(benchmark_data.index, format="%Y-%m-%d")
    return benchmark_data
def run():
    ts.set_token(ct.DATA_YES_TOKEN)
    st = ts.Market()
    today = datetime.strftime(datetime.today(), "%Y%m%d")
    stock_list = st.MktEqud(tradeDate="20160513", field="ticker,PE,secShortName")
    if not isinstance(stock_list, pd.DataFrame) or stock_list.empty:
        return
    stock_list['ticker'] = stock_list['ticker'].map(lambda x: str(x).zfill(6))
    result = []
    mongo = Mongo()
    db = mongo.getDB()
    for i in stock_list.index:
        code = stock_list.loc[i, 'ticker']
        pe = stock_list.loc[i, 'PE']
        name = stock_list.loc[i, 'secShortName']
        if np.isnan(pe):
            continue
        cursor = db.year_min_value.find({"ticker": code})
        if cursor.count() <= 0:
            continue
        pe_list = []
        for row in cursor:
            pe_list.append(row['pe'])
        min_pe = min(pe_list)
        rate = (pe - min_pe) / min_pe
        result.append({"code": code, "name": name, "pe": pe, "min_pe": min_pe, "rate": rate})
    df = pd.DataFrame(result)
    if db.lowest_pe_stock.find().count() > 0:
        db.lowest_pe_stock.remove()
    db.lowest_pe_stock.insert(json.loads(df.to_json(orient='records')))
def import_datayes_daily_data(start_date, end_date, cont_list=[], is_replace=False):
    numdays = (end_date - start_date).days + 1
    date_list = [start_date + datetime.timedelta(days=x) for x in range(0, numdays)]
    date_list = [d for d in date_list if (d.weekday() < 5) and (d not in misc.CHN_Holidays)]
    for d in date_list:
        cnt = 0
        dstring = d.strftime('%Y%m%d')
        ts.set_token(misc.datayes_token)
        mkt = ts.Market()
        df = mkt.MktFutd(tradeDate=dstring)
        if len(df.ticker) == 0:
            continue
        for cont in df.ticker:
            if (len(cont_list) > 0) and (cont not in cont_list):
                continue
            data = df[df.ticker == cont]
            if len(data) == 0:
                print 'no data for %s for %s' % (cont, dstring)
            else:
                data_dict = {}
                data_dict['date'] = d
                data_dict['open'] = float(data.openPrice)
                data_dict['close'] = float(data.closePrice)
                data_dict['high'] = float(data.highestPrice)
                data_dict['low'] = float(data.lowestPrice)
                data_dict['volume'] = int(data.turnoverVol)
                data_dict['openInterest'] = int(data.openInt)
                if data_dict['volume'] > 0:
                    cnt += 1
                    db.insert_daily_data(cont, data_dict, is_replace=is_replace, dbtable='fut_daily')
        print 'date=%s, insert count = %s' % (d, cnt)
def get_equity_eod(instruments, start_date, end_date):
    if Settings.data_source == DataSource.DXDataCenter:
        from DataAPI import api
        data = api.GetEquityBarEOD(instrumentIDList=instruments,
                                   startDate=start_date,
                                   endDate=end_date,
                                   field='closePrice',
                                   instrumentIDasCol=True,
                                   baseDate='end')
    elif Settings.data_source == DataSource.DataYes:
        import os
        import tushare as ts
        try:
            ts.set_token(os.environ['DATAYES_TOKEN'])
        except KeyError:
            raise
        mt = ts.Market()
        res = []
        for ins in instruments:
            data = mt.MktEqud(ticker=ins,
                              beginDate=start_date.replace('-', ''),
                              endDate=end_date.replace('-', ''),
                              field='tradeDate,ticker,closePrice')
            res.append(data)
        data = pd.concat(res)
        data['tradeDate'] = pd.to_datetime(data['tradeDate'], format='%Y-%m-%d')
        data['ticker'] = data['ticker'].apply(lambda x: '{0:06d}'.format(x))
        data.set_index(['tradeDate', 'ticker'], inplace=True, verify_integrity=True)
        data = data.unstack(level=-1)
    return data
def getNews(self):
    token = '60517739976b768e07823056c6f9cb0fee33ed55a1709b3eaa14a76c6a1b7a56'
    ts.set_token(token)
    print(ts.get_token())
    mkt = ts.Market()
    df = mkt.TickRTSnapshot(securityID='000001.XSHE')
    print(df)
def quanshan():  # Failed
    ts.set_token('de0596189f600d1dc59c509e5b6a1387e4e29cb6225697a25ef9d5d2a425d854')
    ts.get_token()
    mt = ts.Market()
    print(mt)
    df = mt.TickRTSnapshot(securityID='000001.XSHE')
    print(df)
def set_universe(code, refDate=None):
    if Settings.data_source != DataSource.DXDataCenter:
        import tushare as ts
        ts.set_token('2bfc4b3b06efa5d8bba2ab9ef83b5d61f1c3887834de729b60eec9f13e1d4df8')
        idx = ts.Idx()
        return list(idx.IdxCons(secID=code, field='consID')['consID'])
    else:
        from DataAPI import api
        data = api.GetIndexConstitutionInfo(code, refDate=refDate).sort_values('conSecurityID')
        return list(data.conSecurityID)
def ipo():
    ts.set_token('b4c94429dc00fee32d14c52507d9cd44c9621ca91eaa161fcec14041')
    pro = ts.pro_api()
    # Query the list of all currently listed, normally trading stocks
    df = pro.stock_basic(exchange_id='', list_status='L',
                         fields='ts_code,symbol,name,area,industry,list_date')
    print(df)
    data = df.to_dict('index')
    print(data.items())
    for item, value in sorted(data.items()):
        code = value['symbol']
        ipo_date = value['list_date']
        Stock.objects(code=code).update_one(code=code, ipo_date=ipo_date, upsert=True)
def set_token(token=None):
    try:
        if token is None:
            # Read the token from ~/.quantaxis/setting/config.ini
            token = QASETTING.get_config('TSPRO', 'token', None)
        else:
            QASETTING.set_config('TSPRO', 'token', token)
        ts.set_token(token)
    except:
        if token is None:
            print('Please set a tushare token')
        else:
            print('Please upgrade tushare to the latest version: pip install tushare -U')
def set_universe(code, refDate=None):
    if Settings.data_source != DataSource.DXDataCenter:
        import os
        import tushare as ts
        try:
            ts.set_token(os.environ['DATAYES_TOKEN'])
        except KeyError:
            raise
        idx = ts.Idx()
        return list(idx.IdxCons(secID=code, field='consID')['consID'])
    else:
        from DataAPI import api
        data = api.GetIndexConstitutionInfo(code, refDate=refDate).sort_values('conSecurityID')
        return list(data.conSecurityID)
def __init__(self, **kwargs):
    super(DataYesMarketDataHandler, self).__init__(kwargs['logger'], kwargs['symbolList'])
    if kwargs['token']:
        ts.set_token(kwargs['token'])
    else:
        try:
            token = os.environ['DATAYES_TOKEN']
            ts.set_token(token)
        except KeyError:
            raise ValueError("Please input token or set up DATAYES_TOKEN in the environment.")
    self.idx = ts.Idx()
    self.startDate = kwargs['startDate'].strftime("%Y%m%d")
    self.endDate = kwargs['endDate'].strftime("%Y%m%d")
    self._getDatas()
    if kwargs['benchmark']:
        self._getBenchmarkData(kwargs['benchmark'], self.startDate, self.endDate)
def get_stockholder(code, start, end):
    # Top-10 shareholders and top-10 float shareholders
    global pro
    try:
        stockholder = pro.top10_holders(ts_code=code, start_date=start, end_date=end)
        time.sleep(1)
        stockfloat = pro.top10_floatholders(ts_code=code, start_date=start, end_date=end)
        time.sleep(1)
    except Exception as e:
        print(e)
        time.sleep(10)
        # Re-create the pro API after a failure; this branch returns None implicitly
        ts.set_token(token)
        pro = ts.pro_api()
    else:
        if stockholder.empty and stockfloat.empty:
            return pd.DataFrame(), pd.DataFrame()
        else:
            return stockholder, stockfloat
def fetch_():
    ts.set_token(cfg.get_datayes_key())
    eq = ts.Equity()
    df = eq.Equ(equTypeCD='A', listStatusCD='L', field='ticker')
    df['ticker'] = df['ticker'].map(lambda x: str(x).zfill(6))
    start, end = '20150901', '20160326'
    # threads cannot make full use of the CPU, so use processes instead
    for i, row in df.iterrows():
        csv = cfg.get_ratio_table_path(row['ticker'], start, end)
        if os.path.exists(csv):
            print("{0}/{1} {2} exists.".format(i, len(df.index), row['ticker']))
            continue
        # code_queue.put(row['ticker'])
        # code_list.append(str(row['ticker']))
        proc_queue.put(row['ticker'])

    # for i in range(3):
    #     thread = ExportThread(thread_id, code_queue, start, end)
    #     thread.start()
    #     threads.append(thread)
    #     thread_id += 1
    #
    # get_code_and_export(0, code_queue, start, end)
    #
    # for t in threads:
    #     t.join()
    #
    # print("Exit main thread.")

    processes = 4
    for i in range(processes):
        p = ExportProcess(i + 1, start, end, proc_queue)
        p.start()
        procs.append(p)
    for p in procs:
        p.join()
    print("Exit main")
def export_ratio_table(code, start, end, thread_id):
    # queue_lock.acquire()
    ts.set_token(cfg.get_datayes_key())
    mkt = ts.Market()
    # print("exporting " + code + " from " + start + " to " + end)
    st = time.time()
    df = mkt.MktEqud(ticker=code, beginDate=start, endDate=end,
                     field='ticker,tradeDate,preClosePrice,openPrice,highestPrice,lowestPrice,closePrice')
    print("    Thread {0} fetch online: {1}".format(thread_id, time.time() - st))
    # queue_lock.release()
    # df = ts.get_h_data(code, start, end)
    # print(df)

    wave_ratio_df = pd.DataFrame(columns=["max_ratio", "min_ratio"])
    for i, row in df.iterrows():
        ratios = wv.calc_wave_ratio(row["preClosePrice"], row["openPrice"],
                                    row["highestPrice"], row["lowestPrice"])
        wave_ratio_df.loc[row["tradeDate"]] = ratios

    st = time.time()
    idx_col = wv.calc_ratio_table_index_and_columns(max_ratio=0.03, min_ratio=-0.03)
    index, columns = idx_col["index"], idx_col["columns"]
    ratio_table = wv.calc_ratio_table(wave_ratio_df, index, columns)
    print("    Thread {0} calc ratio table: {1}".format(thread_id, time.time() - st))

    st = time.time()
    length_ratio_df = wv.calc_length_ratio(ratio_table, len(wave_ratio_df.index))
    print("    Thread {0} calc length ratio: {1}".format(thread_id, time.time() - st))

    # write csv
    st = time.time()
    ratio_table.to_csv(cfg.get_ratio_table_path(code, start, end))
    length_ratio_df.to_csv(cfg.get_length_ratio_path(code, start, end))
    # print("    save csv: {0}".format(time.time() - st))
    return length_ratio_df
def setToken(token):
    ts.set_token(token)
    print(ts.get_token())
import tushare as ts
import pymysql
import time
import datetime
from sqlalchemy import create_engine

thistbname = 'tb_moneyflowhsgt'
thistbinfo = '沪深港通资金流向'
ts.set_token('7eb4bc05a48bb2704d76c1b79c501053b58ad1b190b505faa9009d5c')
pro = ts.pro_api()
con = pymysql.connect(user='******', password='******', database='fundamentalplatform', charset='utf8')
cu = con.cursor()
nowtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
nowdate = time.strftime("%Y%m%d", time.localtime())

'''
engine = create_engine("mysql+pymysql://{}:{}@{}/{}".format('root', 'lksjlksj', 'localhost', 'fundamentalplatform'))
con = engine.connect()
df = pro.moneyflow_hsgt(trade_date='20190315')
print(df)
df.to_sql(name=thistbname, con=con, if_exists='append', index=False)
'''

try:
    cu.execute(
        'select lastdate from tb_index where tbname = "{tbname}"'.format(tbname=thistbname))
    lastdate = cu.fetchall()[0][0]
import matplotlib.pyplot as plt
import tushare as ts
import datetime

dt = datetime.datetime.now().strftime('%Y%m%d')
symbol = '300072'
symbol = '300330'
symbol = '600868'
symbol = '002384'
symbol = '002430'
symbol = '002382'
# df1 = ts.get_today_ticks(symbol)
# df2 = ts.get_tick_data(symbol, date=dt)
# df3 = ts.get_hist_data(symbol)
df = ts.get_realtime_quotes(symbol)

ts.set_token('7e30dd0a070cd4306193a5925ec5b3c250a694f08ea390d7cc3af2d6')
pro = ts.pro_api()
df = pro.fina_mainbz(ts_code='000627.SZ', period='20171231', type='P')
# df = pro.index_basic('sw')
# df = pro.cashflow(ts_code='600000.SH', start_date='20180101', end_date='20180730')
# df = ts.moneyflow_hsgt()
# ts.cashflow(symbol)
print df.describe()
# ts.get_k_data
# ts.get_hist_data
# ts.get_today_all
# print df1, df2
# df2.to_csv('%s.today.csv' % symbol, encoding='gbk')
# print df
# exit()
    return dd, hh, mm, ss


# Main entry point
if __name__ == '__main__':
    # Start the timer
    time_start = time.time()
    # Open the database connection, set the tushare token and define some initial parameters
    db = pymysql.connect(host='localhost', user='******', passwd='your password',
                         db='your dbname', charset='utf8mb4')
    cursor = db.cursor()
    ts.set_token('your token')
    pro = ts.pro_api()
    # Choose the backtest window
    year = 2019
    date_seq_start = str(year) + '-07-24'
    date_seq_end = str(year) + '-08-23'
    # Compute the real calendar interval, used to scale the risk-free rate in the Sharpe ratio
    dt_start = datetime.datetime.strptime(date_seq_start, '%Y-%m-%d')
    dt_end = datetime.datetime.strptime(date_seq_end, '%Y-%m-%d')
    delta_dt = (dt_end - dt_start).days
    # Define the stock pool to backtest, cloud-computing related names:
    # 中国软件, 中兴通讯, 浪潮信息, 用友网络, 宝信软件
    # The real edge lies in stock selection, which is beyond the scope of this script
    stock_pool = [
import tushare as ts
import pandas as pd
import os
import numpy as np
import time
from tqdm import tqdm

"""
Fetch historical data
"""

mytoken = ''
ts.set_token(mytoken)
save_path = 'stock'
pro = ts.pro_api()


def getNoramlData():
    # Fetch basic listing information: ts code, name, list date, delist date, etc.
    pool = pro.stock_basic(exchange='', list_status='L', adj='qfq',
                           fields='ts_code,symbol,name,area,industry,fullname,list_date,market,exchange,is_hs')
    # print(pool.head())
    # No access to the ChiNext and STAR boards here, so only keep the main board and the SME board
    pool = pool[pool['market'].isin(['主板', '中小板'])].reset_index()
    pool.to_csv(os.path.join(save_path, 'company_info.csv'), index=False, encoding='utf-8')
    print('Number of listed stocks fetched:', len(pool) - 1)
def _login():
    token = '0b23bc848cf9b8e79df3b0d90c2406ddff1ffd2cc30835b1782be1177f3804fb'
    ts.set_token(token)
import tushare as ts
import quantdata.cons as ct
from quantdata.db.mongo import Mongo
from datetime import datetime
from quantdata import logger
import time
import json
import pymongo

ts.set_token(ct.DATA_YES_TOKEN)


def run():
    '''get quote data'''
    # set up logging
    LOGGER_NAME = "HISTORY_DATA"
    mylog = logger.getLogger(LOGGER_NAME)
    # get the stock list
    today = datetime.strftime(datetime.today(), "%Y%m%d")
    mongo = Mongo()
    db = mongo.getDB()
    cursor = db.stock_list.find({"listStatusCD": "L"})
    for row in cursor:
        ticker = str(row['ticker'])
        mylog.info("update history data of %s" % (ticker))
        exchangeCD = str(row['exchangeCD'])
        listDate = str(row['listDate']).replace("-", "").replace("NaN", "")
        if exchangeCD == 'XSHG' and not ticker.startswith("6"):
            continue
# coding=utf-8
# Token=728e54671e5284ce12bb098ea5d481b266570256d5430b53750093af
import tushare as ts

ts.set_token('728e54671e5284ce12bb098ea5d481b266570256d5430b53750093af')
pro = ts.pro_api()
# df = pro.query('daily', ts_code='000835.SZ', start_date='20170101', end_date='20180801')
df = ts.get_hist_data('000835')
print(df)
# df.to_csv('..\\..\\Quant\\stock\\Data\\NewDown\\000835.sz.csv')
def get_tushare_api():
    ts.set_token('9303ab9ddece253dc96ac6f4662f22a1d0d92579f1d18368f87aaf65')
    return ts.pro_api()
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tushare as ts

ts.set_token('c2ce9df72eb324c01e809da43379650bbc25f3a0f0c2ef9aa75f6baa')

# Fetch the data
s_pf = '600848'
s_gd = '601818'
sdate = '2016-01-01'
edate = '2016-12-31'
pro = ts.pro_api()
# print(ts.get_hist_data('600848'))  # fetch all daily bars at once
# res = ts.pro_bar(ts_code='000001.SZ', adj='qfq', start_date='20180101', end_date='20181011')
# print(res.close)
df_pf = ts.pro_bar(ts_code='002407.SZ', adj='qfq', start_date='20180101',
                   end_date='20190521').sort_index(axis=0, ascending=True)
df_gd = ts.pro_bar(ts_code='002092.SZ', adj='qfq', start_date='20190101',
                   end_date='20190521').sort_index(axis=0, ascending=True)
# df_pf = ts.get_h_data(s_pf, start=sdate, end=edate)
# df_gd = ts.get_h_data(s_gd, start=sdate, end=edate).sort_index(axis=0, ascending=True)
# print(df_pf.close)
# df = pd.concat([df_pf.close, df_gd.close], axis=1, keys=['pf_close', 'gd_close'])
df = pd.concat([df_pf.close], axis=1, keys=['pf_close'])
df.ffill(axis=0, inplace=True)  # forward-fill missing values
df.to_csv('pf_gd.csv')
def get_portfolio(stock_list, state_dt, para_window):
    # Open the database connection and set the tushare token
    db = pymysql.connect(host='127.0.0.1', user='******', passwd='root', db='stock', charset='utf8')
    cursor = db.cursor()
    ts.set_token('519ade7742d87ada41e0aba865dbecfadeed0a4443f6f88ff6a300a2')
    pro = ts.pro_api()
    portfilio = stock_list

    # Build the evaluation date sequence; para_window is the backtest window length
    model_test_date_start = (datetime.datetime.strptime(state_dt, '%Y-%m-%d')
                             - datetime.timedelta(days=para_window)).strftime('%Y%m%d')
    model_test_date_end = (datetime.datetime.strptime(state_dt, "%Y-%m-%d")).strftime('%Y%m%d')
    df = pro.trade_cal(exchange_id='', is_open=1,
                       start_date=model_test_date_start, end_date=model_test_date_end)
    date_temp = list(df.iloc[:, 1])
    model_test_date_seq = [(datetime.datetime.strptime(x, "%Y%m%d")).strftime('%Y-%m-%d') for x in date_temp]

    list_return = []
    for i in range(len(model_test_date_seq) - 4):
        ti = model_test_date_seq[i]
        ri = []
        for j in range(len(portfilio)):
            sql_select = "select * from stock_all a where a.stock_code = '%s' and a.state_dt >= '%s' and a.state_dt <= '%s' order by state_dt asc" % (portfilio[j], model_test_date_seq[i], model_test_date_seq[i + 4])
            cursor.execute(sql_select)
            done_set = cursor.fetchall()
            db.commit()
            temp = [x[3] for x in done_set]
            base_price = 0.00
            after_mean_price = 0.00
            if len(temp) <= 1:
                r = 0.00
            else:
                base_price = temp[0]
                after_mean_price = np.array(temp[1:]).mean()
                r = (float(after_mean_price / base_price) - 1.00) * 100.00
            ri.append(r)
            del done_set
            del temp
            del base_price
            del after_mean_price
        list_return.append(ri)

    # Covariance matrix of the return series
    cov = np.cov(np.array(list_return).T)
    # Eigenvalues and the corresponding eigenvectors
    ans = np.linalg.eig(cov)
    # Sort the eigenvalues; within each eigenvector set negative entries to 0 and normalise the rest
    ans_index = copy.copy(ans[0])
    ans_index.sort()
    resu = []
    for k in range(len(ans_index)):
        con_temp = []
        con_temp.append(ans_index[k])
        content_temp1 = ans[1][np.argwhere(ans[0] == ans_index[k])[0][0]]
        content_temp2 = []
        content_sum = np.array([x for x in content_temp1 if x >= 0.00]).sum()
        for m in range(len(content_temp1)):
            if content_temp1[m] >= 0 and content_sum > 0:
                content_temp2.append(content_temp1[m] / content_sum)
            else:
                content_temp2.append(0.00)
        con_temp.append(content_temp2)

        # Sharpe ratio of the weighted portfolio
        sharp_temp = np.array(copy.copy(list_return)) * content_temp2
        sharp_exp = sharp_temp.mean()
        sharp_base = 0.04
        sharp_std = np.std(sharp_temp)
        if sharp_std == 0.00:
            sharp = 0.00
        else:
            sharp = (sharp_exp - sharp_base) / sharp_std
        con_temp.append(sharp)
        resu.append(con_temp)
    return resu
import datetime
import tushare as ts   # tushare module
import pymysql         # pymysql module
import numpy as np

# Connect to the MySQL database "stock"
db = pymysql.connect(host='127.0.0.1', user='******', passwd='112358', db='stock')
cursor = db.cursor()
# Set the tushare pro token and get a connection
ts.set_token('ae9283163f806c321f2f4aa00a3c5cce84e69aa44eb2873d4863498e')
pro = ts.pro_api()
# Set the start and end dates for the daily bars; the end date is yesterday
start_dt = '20100101'
time_temp = datetime.datetime.now() - datetime.timedelta(days=1)
end_dt = time_temp.strftime('%Y%m%d')
stocks = pro.daily('ts_code', start_date=start_dt, trade_date=end_dt)
stock_pool = np.array(stocks['ts_code'])  # list
total = len(stock_pool)
import json
import time
from concurrent.futures import ThreadPoolExecutor

import tushare as ts
from sqlalchemy import text

from clients import clients
from settings import API_TOKEN

ts.set_token(API_TOKEN)
pro = ts.pro_api()


def update_or_insert(data):
    update_sql = "update stock_basic set ts_code=:ts_code, stock_name=:stock_name, area=:area, " \
                 "industry=:industry, list_date=:list_date where stock_id=:stock_id"
    insert_sql = "insert into stock_basic(ts_code, stock_id, stock_name, area, industry, list_date) " \
                 "values (:ts_code, :stock_id, :stock_name, :area, :industry, :list_date)"
    with clients.mysql_db.connect() as conn:
        result = conn.execute(text(update_sql), **data)
        stock_id = data.get('stock_id')
        if result.rowcount > 0:
            return '[update | {}]: {}'.format(stock_id, json.dumps(data, ensure_ascii=False))
        conn.execute(text(insert_sql), **data)
        return '[insert | {}]: {}'.format(stock_id, json.dumps(data, ensure_ascii=False))
def set_universe(code):
    import tushare as ts
    ts.set_token('2bfc4b3b06efa5d8bba2ab9ef83b5d61f1c3887834de729b60eec9f13e1d4df8')
    idx = ts.Idx()
    return list(idx.IdxCons(secID=code, field='consID')['consID'])
import tushare as ts
import MySQLdb
from datetime import date
import datetime

print datetime.datetime.now()
conn = MySQLdb.connect(host="localhost", user="******", passwd="root", db="stock", charset="utf8")
cursor = conn.cursor()
ts.set_token('e8596c92be7248552f8fa6b4af32f5c8eed01e2044b0962313fdaec5e69e5d5c')
mt = ts.Master()
df = mt.TradeCal(exchangeCD='XSHG', beginDate='20160101', endDate='20161231',
                 field='exchangeCD,calendarDate,isOpen,prevTradeDate,isWeekEnd,isMonthEnd,isQuarterEnd,isYearEnd')
if df is not None:
    for idx in df.index:
        temp = df.ix[idx]
        sql = "insert into trade_cal(exchangeCD,calendarDate,isOpen,prevTradeDate,isWeekEnd,isMonthEnd,isQuarterEnd,isYearEnd) \
               values(%s,%s,%s,%s,%s,%s,%s,%s)"
        param = (temp['exchangeCD'], temp['calendarDate'], temp['isOpen'], temp['prevTradeDate'],
                 temp['isWeekEnd'], temp['isMonthEnd'], temp['isQuarterEnd'], temp['isYearEnd'])
        cursor.execute(sql, param)
    conn.commit()
cursor.close()
conn.close()
print datetime.datetime.now()
def __init__(self):
    ts.set_token('c44e067e293a18b4b6852036dbaf87979112fa2615bb2a8d1cdb3b63')
    self.pro = ts.pro_api()
# @File : stockholder_info.py
# Fetch shareholder information
import pandas as pd
import time
import pymysql
import tushare as ts

import config
from setting import get_mysql_conn

conn = get_mysql_conn('db_stock', 'local')
cursor = conn.cursor()
token = config.token
ts.set_token(token)
pro = ts.pro_api()


def get_stock_list():
    df = pro.stock_basic(exchange='', list_status='L',
                         fields='ts_code,symbol,name,area,industry,list_date')
    return dict(zip(list(df['ts_code'].values), list(df['name'].values)))


# Generate the date ranges for 2000 to 2018
def create_date():
    start_date = '20{}0101'
    end_date = '20{}1231'
    date_list = []
    for i in range(18, 0, -1):
def __apiConnect(self, config):
    """Connect to the data source"""
    if config:
        token = config.get('TOKEN', None)
    else:
        self.writeLog('Failed to read the tushare connection settings, please check')
        return False
    if token:
        # Set the tushare data token
        ts.set_token(token)
        try:
            self._api = ts.pro_api()

            # Build the contract information dictionary
            stock_df = self._api.stock_basic(
                exchange='',
                list_status='L',
                fields='ts_code,symbol,name,exchange,list_status,list_date,delist_date')
            if stock_df is None or stock_df.empty:
                self.writeLog('No tradable contract information returned, please check the parameters and the network connection')
                self._api = None
                return False
            stock_df.set_index('ts_code', inplace=True)
            self._contractDict = stock_df.T.to_dict()

            # Build the index information dictionary
            index_SSE_zh = self._api.index_basic(
                market='SSE', category='综合指数',
                fields='ts_code,name,market,category,list_date,desc')
            index_SSE_gm = self._api.index_basic(
                market='SSE', category='规模指数',
                fields='ts_code,name,market,category,list_date,desc')
            index_SZSE_zh = self._api.index_basic(
                market='SZSE', category='综合指数',
                fields='ts_code,name,market,category,list_date,desc')
            index_SZSE_gm = self._api.index_basic(
                market='SZSE', category='规模指数',
                fields='ts_code,name,market,category,list_date,desc')
            df = pd.concat(
                [index_SSE_zh, index_SSE_gm, index_SZSE_zh, index_SZSE_gm],
                ignore_index=True)
            df.set_index('ts_code', inplace=True)
            self._indexDict = df.T.to_dict()

            # Build the trading calendar
            try:
                self._calendar = pd.read_csv('etc/calendar.csv').trade_date
            except Exception:
                cal = self.tradeCalendar()
                if cal is None:
                    self.writeLog('Failed to fetch the trading calendar, please check')
                    return False
                else:
                    self._calendar = cal
                    self._calendar.to_csv('etc/calendar.csv', header=True, index=False)
            return True
        except Exception as e:
            self.writeLog('Program error: %s' % e)
            self._api = None
            return False
    else:
        self.writeLog('The tushare data token is not set, please check')
        return False
def set_token():
    if ts.get_token() is None:
        ts.set_token('03d8d816cd281b447e2809dfbac371a992620752da35392f5ea41c1be5e3f827')
        print('Token credential has been set')
def main():
    ts.set_token('267addf63a14adcfc98067fc253fbd72a728461706acf9474c0dae29')
    pro = ts.pro_api()

    dict_300 = {}
    for i in range(14):
        dict_300[str(2007 + i) + '0101'] = list(pro.index_weight(
            index_code='399300.SZ', start_date=str(2007 + i) + '0101',
            end_date=str(2007 + i) + '0110')['con_code'].iloc[:300])
        dict_300[str(2007 + i) + '0701'] = list(pro.index_weight(
            index_code='399300.SZ', start_date=str(2007 + i) + '0625',
            end_date=str(2007 + i) + '0701')['con_code'].iloc[:300])

    dict_500 = {}
    for i in range(14):
        dict_500[str(2007 + i) + '0101'] = list(pro.index_weight(
            index_code='000905.SH', start_date=str(2007 + i) + '0101',
            end_date=str(2007 + i) + '0201')['con_code'].iloc[:500])
        dict_500[str(2007 + i) + '0701'] = list(pro.index_weight(
            index_code='000905.SH', start_date=str(2007 + i) + '0625',
            end_date=str(2007 + i) + '0710')['con_code'].iloc[:500])

    calendar = pro.trade_cal(exchange='')
    calendar = calendar[calendar['is_open'] == 1]['cal_date']
    dict_industry = get_data.get_industry_stock_list()
    stock_list = get_data.get_sql_key()
    # prep_data_for_rf(stock_list, dict_industry, calendar, 1, dict_300, dict_500)

    stock_list_list = []
    length = int(len(stock_list) / 24)
    for i in range(24):
        if i == 23:
            stock_list_list.append(stock_list[i * length:])
        else:
            stock_list_list.append(stock_list[i * length: (i + 1) * length])

    p = Pool()
    for i in range(24):
        p.apply_async(prep_data_for_rf,
                      args=(stock_list_list[i], dict_industry, calendar, i, dict_300, dict_500,))
    p.close()
    p.join()

    data = pd.DataFrame()
    for i in range(24):
        con = db.connect('D:\\Data\\rf_temp_' + str(i) + '.sqlite')
        cur = con.cursor()
        data_temp = pd.read_sql_query(sql="SELECT * FROM '" + str(i) + "'", con=con)
        data = data.append(data_temp)
        cur.close()
        con.close()

    con = db.connect('D:\\Data\\rf_data.sqlite')
    cur = con.cursor()
    data.to_sql(name='All_Data', con=con, if_exists='replace', index=False)
    con.commit()
    data_test = data[data['date'] >= '20170101']
    data_test.to_sql(name='test_data', con=con, if_exists='replace', index=False)
    data_train = data[data['date'] < '20170101']
    data_train.to_sql(name='train_data', con=con, if_exists='replace', index=False)
    con.commit()
    cur.close()
    con.close()

    growth = 'roe_yoy,q_gr_yoy,q_sales_yoy,q_op_yoy,q_profit_yoy,q_netprofit_yoy,ocf_yoy,equity_yoy'
    balance_sheet = 'current_ratio,quick_ratio,cash_ratio,ca_to_assets,tbassets_to_totalassets,int_to_talcap,currentdebt_to_debt,longdeb_to_debt,ocf_to_shortdebt,debt_to_eqt,tangibleasset_to_debt,tangasset_to_intdebt,tangibleasset_to_netdebt,ocf_to_debt,ocf_to_interestdebt,longdebt_to_workingcapital,ebitda_to_debt'
    cashflow = 'inv_turn,ar_turn,ca_turn,fa_turn,assets_turn,ocf_to_or,ocf_to_opincome,q_ocf_to_or,q_ocf_to_sales'
    profit_quality = 'q_netprofit_margin,q_gsprofit_margin,q_exp_to_sales,q_profit_to_gr,q_saleexp_to_gr,q_adminexp_to_gr,q_finaexp_to_gr,q_impair_to_gr_ttm,q_gc_to_gr,q_op_to_gr,q_roe,q_dt_roe,q_opincome_to_ebt,q_investincome_to_ebt,q_dtprofit_to_profit,q_salescash_to_or'
    fund_factor = list(set((growth + ',' + balance_sheet + ',' + cashflow + ',' + profit_quality).split(',')) & set(data.columns)) \
        + ['tick', 'industry', 'stock_value_cat', 'date', 'fcfe', 'rd_exp_to_earning', 'pb', 'pe', 'ps',
           'q_npta', 'cash_to_liqdebt', 'tax_to_ebt', 'cash_to_liqdebt_withinterest', 'return_rate']
    tec_factor = list(set(data.columns) - set(fund_factor)) + ['tick', 'industry', 'stock_value_cat', 'date', 'return_rate']

    industry_list = data_train['industry'].drop_duplicates().fillna('None')
    con = db.connect('D:\\Data\\rf_data_industry.sqlite')
    cur = con.cursor()
    for industry in industry_list:
        data_train[data_train['industry'] == industry][fund_factor].to_sql(
            name='train_data_' + industry, con=con, if_exists='replace', index=False)
        data_test[data_test['industry'] == industry][fund_factor].to_sql(
            name='test_data_' + industry, con=con, if_exists='replace', index=False)
    con.commit()
    cur.close()
    con.close()

    stock_mv_cat_list = data_train['stock_value_cat'].drop_duplicates().fillna('None')
    con = db.connect('D:\\Data\\rf_data_mv.sqlite')
    cur = con.cursor()
    for cat in stock_mv_cat_list:
        data_train[data_train['stock_value_cat'] == cat][tec_factor].to_sql(
            name='train_data_' + cat, con=con, if_exists='replace', index=False)
        data_test[data_test['stock_value_cat'] == cat][tec_factor].to_sql(
            name='test_data_' + cat, con=con, if_exists='replace', index=False)
    con.commit()
    cur.close()
    con.close()

    print('done')
    return None
    df = ts.pro_bar(ts_code=tscodes[i], start_date=g_start_date, end_date=g_end_date,
                    ma=[2, 3, 4, 5, 8, 10, 15, 20], factors=['tor', 'vr'])
    df.to_csv(save_path)
    time.sleep(0.5)


def get_price(szcode, date=None):
    global root
    date = strtime_latest_trade_date(date)
    date = int(strtime_convert(date))
    df = pd.read_csv(root + 'stock/hdailydata/hdaily-data/' + szcode + '.csv')
    pricedf = df.loc[df['trade_date'] == date, ['open', 'close']]
    if len(pricedf['open'].tolist()) > 0:
        return {
            'date': date,
            'open': pricedf['open'].tolist()[0],
            'close': pricedf['close'].tolist()[0]
        }
    return {'date': -1, 'open': -1, 'close': -1}


if __name__ == '__main__':
    ts.set_token('08aedc1cc54171e54a64bbe834ec1cb45026fa2ab39e9e4cb8208cad')
    pro = ts.pro_api('08aedc1cc54171e54a64bbe834ec1cb45026fa2ab39e9e4cb8208cad')
    # download(pro)
    print(get_price('000010.SZ', '2019-08-30'))
mpl.rcParams['font.sans-serif'] = ['SimHei']
mpl.rcParams['axes.unicode_minus'] = False

# Import the TA-Lib library
import talib as ta

# Inspect the available technical indicators and math functions
# print(ta.get_functions())
# print(ta.get_function_groups())
ta_fun = ta.get_function_groups()
ta_fun.keys()

# Use tushare to fetch index/stock data as the example
import tushare as ts

ts.set_token('a119134c895dca96f7caedef1de1fcf51409888f8df48aabf62c0399')
pro = ts.pro_api()
# df = ts.get_k_data('sh', start='2000-01-01')
df = pro.query('daily', ts_code='600519.SH', start_date='20100101', end_date='20211217')
# df.index = pd.to_datetime(df.date)
df.index = pd.to_datetime(df.trade_date)
df = df.sort_index()

# RSI
df["rsi"] = ta.RSI(df.close, timeperiod=14)
ax = df[["close", "rsi"]].plot(secondary_y=['rsi'],
                               figsize=(16, 8),
                               title='RSI',
# High, low, open, close, volume, turnover rate and money flow.
# PS: a single call to get_hist_data already returns all of it, covering roughly the last 3 years.
###
import pandas as pd
import tushare as ts
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import matplotlib.finance as mfinance
from matplotlib.dates import YearLocator, MonthLocator, DayLocator, DateFormatter

ts.set_token('6cbc132dcf304322dc7b6f1d714d792fe224049b2038abf29e7711bf71334b52')

## ------------------------------------
# Fetch the data through the tushare interface
# sh = SSE Composite, sz = SZSE Component, hs300 = CSI 300, sz50 = SSE 50,
# zxb = SME board, cyb = ChiNext
## ------------------------------------
# Examples: 牧原股份, 国轩高科, 哈尔斯, 宁波港
candi_stock = ts.get_hist_data('002074', start='2014-01-01', end='2015-12-31')
sh_index = ts.get_hist_data('sh', start='2014-01-01', end='2015-12-31')
zxb_index = ts.get_hist_data('zxb', start='2014-01-01', end='2015-12-31')

## Fetch the exchange calendar information
# mt = ts.Master()
def require_data(self):
    ts.set_token('6bf40c7acd89be6fadaba6ab8b06e22b988cb6fed50d552885df6f81')
    pro = ts.pro_api()
    self.df = pro.daily(ts_code=self.share_dict['stack'],
                        start_date=self.start_date,
                        end_date=self.end_date)
    self.df = self.df.sort_values(by=['trade_date'], ascending=True)
# -*- coding:utf-8 -*-
import tushare as ts

token = '0e693d9bddaad8bf493fd3f19a04741337bcfc9bd3b686415938f866'
ts.set_token(token)
pro = ts.pro_api()

data = pro.stock_basic(exchange='', list_status='L',
                       fields='ts_code,symbol,name,area,industry,list_date')

# df = ts.get_realtime_quotes(['300059', '399006', 'sh'])
# print(df['code'][2] + " " + df['name'][2] + " " + str(round((float(df['price'][2]) - float(df['pre_close'][2])) / float(df['pre_close'][2]) * 100, 2)) + "%" + " ")
# print(df['code'][1] + " " + df['name'][1] + " " + str(round((float(df['price'][1]) - float(df['pre_close'][1])) / float(df['pre_close'][1]) * 100, 2)) + "%" + " ")
# print(df['code'][0] + " " + df['name'][0] + " " + str(round((float(df['price'][0]) - float(df['pre_close'][0])) / float(df['pre_close'][0]) * 100, 2)) + "%" + " ")
import tushare as ts
from sqlalchemy import create_engine

ts.set_token("0b8f33e64a5558e84bd5b7499d0d0d6417d11d7db5d7ae960889f00e")
pro = ts.pro_api()

# Note: renamed from "list" to avoid shadowing the built-in
stock_list = pro.stock_basic(
    fields='ts_code,symbol,name,fullname,enname,exchange_id,curr_type,list_date,delist_date,is_hs,list_status')
stock_list.to_json('./share_list.json', orient='records')
# engine = create_engine('mysql+pymysql://root:[email protected]/quantify?charset=utf8')
# stock_list.to_sql('shares_list', engine, if_exists='append')
{
    "pp": "0",
    "IF": "0"
}

2. Dominant contract detection
   After starting the program, click the "Contract initialisation" button. The program fetches
   futures data from DataYes (通联), determines the dominant contract automatically and writes it
   to Contracts_init.json.
   Note: DataYes picks the dominant contract by open interest, while this program uses the previous
   day's trading volume; a warning is shown when the two disagree.
3. Contract subscription
4. Tick storage
'''
import json
import os
import pymongo
import tushare as ts

# ts.set_token('575593eb7696aec7339224c0fac2313780d8645f68b77369dcb35f8bcb419a0b')
ts.set_token('ced15aa738976abf2136cc9e197fbcd34776e0f8183c7660b7fdcd626a715b3b')  # paolo
import time

from uiBasicWidget import QtGui, QtCore, BasicCell
from eventEngine import *
from ctaAlgo.ctaBase import *
from vtConstant import *
from vtGateway import VtSubscribeReq


########################################################################
class DataRecorder(QtGui.QFrame):
    """
    A tool for recording historical data (based on the CTA strategy framework).
    It can run stand-alone and records tick data.
    """
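The note above says the recorder picks the dominant contract by the previous day's trading volume and warns when that differs from the open-interest-based choice DataYes uses. Below is a minimal, hypothetical sketch of that rule, not the tool's actual implementation: it assumes a DataFrame shaped like the DataYes MktFutd daily output used elsewhere in this collection (columns `ticker`, `turnoverVol`, `openInt`), and the helper names `product_of` and `pick_dominant_contracts` are made up for illustration.

```python
import re
import pandas as pd


def product_of(ticker):
    """Hypothetical helper: strip the digits, e.g. 'rb1605' -> 'rb'."""
    return re.match(r'[A-Za-z]+', str(ticker)).group(0)


def pick_dominant_contracts(daily_df):
    """For each product, pick the contract with the largest previous-day volume.

    daily_df is assumed to hold one row per contract for a single trade date,
    with 'ticker', 'turnoverVol' and 'openInt' columns (DataYes MktFutd style).
    Returns {product: dominant_ticker} and prints a note whenever the
    volume-based choice differs from the open-interest-based one.
    """
    dominant = {}
    for product, grp in daily_df.groupby(daily_df['ticker'].map(product_of)):
        by_volume = grp.loc[grp['turnoverVol'].idxmax(), 'ticker']
        by_open_interest = grp.loc[grp['openInt'].idxmax(), 'ticker']
        if by_volume != by_open_interest:
            print('%s: volume picks %s, open interest picks %s'
                  % (product, by_volume, by_open_interest))
        dominant[product] = by_volume
    return dominant


# Example with made-up numbers: for 'rb' the two criteria disagree, so a note is printed.
sample = pd.DataFrame({'ticker': ['rb1605', 'rb1610', 'IF1606'],
                       'turnoverVol': [120000, 30000, 80000],
                       'openInt': [90000, 110000, 50000]})
print(pick_dominant_contracts(sample))
```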
def __init__(self):
    ts.set_token(self.__TOKEN)
    self.pro = ts.pro_api()
import pymongo
from datetime import datetime
import tushare as ts
import json
import time
import pandas as pd
import numpy as np

print('=======================11')
# Connect to MongoDB
client = pymongo.MongoClient(host='localhost', port=27017)
db_name = 'tushare_storage'   # database name
database = client[db_name]    # database handle
print('=======================22')
ts.set_token("1f5fcc75bfa0d0ddb8e3d7caab4c9623185529a5052e7671f3e9c7e2")  # replace with your own token
# pro = ts.pro_api()
# Note: the database is only created once data has actually been inserted into it
# database = client.stock
# Create a "daily" collection, similar to a table in MySQL
# daily = database["daily"]


def MA(tsPrice, k):
    # Simple moving average
    Sma = pd.Series(0.0, index=tsPrice.index)
    for i in range(k - 1, len(tsPrice)):
        Sma[i] = sum(tsPrice[(i - k + 1):(i + 1)]) / k
    return Sma
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import tushare as ts

st_number = '300454.SZ'
first_money = 1000000

ts.set_token('a339e517ed9b1cb97cda578c2ee8fa829ef50d13ae3623a113227777')
pro = ts.pro_api()
df = pro.daily(ts_code=st_number, start_date='20100701', end_date='20210208')
df.to_csv(st_number + '.csv')
df = pd.read_csv(st_number + '.csv', index_col='trade_date',
                 parse_dates=['trade_date'])[['open', 'close', 'low', 'high']].sort_index()
df['ma5'] = df['open'].rolling(5).mean()
df['ma30'] = df['open'].rolling(30).mean()
df = df.dropna()
# df[['open', 'ma5', 'ma30']].plot()
# plt.show()
sre1 = df['ma5'] < df['ma30']
sre2 = df['ma5'] >= df['ma30']
golden_cross = df[sre1 & sre2.shift(1)].index
# coding: utf-8
__author__ = 'xlyang0211'

import tushare as ts

ts.set_token("efe5e687247788b99191f7fe13357d13b23e89a1df6989ec597d9b8c12a51403")
print ts.get_token()
print ts.get_industry_classified()
# print fd
$nin  not in the given set, e.g. {'age': {'$nin': [20, 23]}}
"""
import tushare as ts
import pymongo
import time
import sys

client = pymongo.MongoClient(host='localhost', port=27017)
db = client.ffgHarvester
col = db.stockDaily
calendarList = db.stockCalendar.find()
todayStr = time.strftime("%Y%m%d", time.localtime())
ts.set_token('495bd6a4d40acef11e6a222a1632889b27c60938aa9decba468c472b')
pro = ts.pro_api()


def main():
    print('Start fetching daily bars')
    for dateObj in calendarList:
        isGetDaily = dateObj['isGetDaily']
        cal_date = dateObj['cal_date']
        if isGetDaily == False:          # data not fetched yet, so pull the daily bars
            if cal_date <= todayStr:     # only dates up to today
                is_open = dateObj['is_open']
                if is_open == 1:
                    df = pro.query("daily", trade_date=cal_date)  # all stocks for that day
                    list = []
#!/bin/python
# -*- coding: utf-8 -*-
import types
import urllib2
import json
import datetime
import time
import DataAPI
import pandas as pd
import tushare as ts

benchmarkMap = {"SH": "000001", "SH50": "000016", "SH180": "000010", "ZZ500": "000905",
                "SZ": "399001", "ZXB": "399101", "HS300": "399300",
                "FUND_SH": "000011", "FUND_SZ": "399305"}

ts.set_token('8a72d90cf841a0dcc4d2d4cf55e48c603e0b3d21295c5b0f93f4be846158d903')
mt = ts.Master()


def registerUrl(url):
    try:
        data = urllib2.urlopen(url).read()
        return data
    except Exception, e:
        print e


def isNum(value):
    try:
        x = int(value)
    except TypeError:
        return False
    except ValueError:
        return False
# Update the daily, weekly and monthly bar data
import tushare as ts
from sqlalchemy import create_engine
import time
import datetime
import pandas as pd

from getStockDaily import getDailyOnDate
from getStockWeekly import getWeeklyOnDate
from getStockMonthly import getMonthlyOnDate
from getStockAdjustFactor import getAdjustFactorOnDate

ts.set_token('803f1548c1f25bf44c56644e4527a6d8cd3dbd8517e7c59e3aa1f6d0')
pro = ts.pro_api()
engine = create_engine(
    "mysql+pymysql://root:4401821211@localhost:3306/stock?charset=utf8")

sqlstr = "SELECT max(trade_date) as maxdate FROM stock.daily2021"
maxDate = pd.read_sql_query(sqlstr, con=engine).loc[0, 'maxdate']
print("previous stock data crawl date is %s, start updating!" % maxDate)

begin = datetime.datetime.strptime(maxDate, '%Y%m%d')
end = datetime.datetime.now()
date = begin
delta = datetime.timedelta(days=1)
while date < end:
    date += delta
    getDailyOnDate(date, pro, engine)
    time.sleep(1)
def connect(self):
    ts.set_token(self.Token)
    self._ts = ts.pro_api()
    return 0
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 4 09:39:53 2019

@author: Xzw
E-mail: [email protected]
"""
import tushare as ts
import pandas as pd
import time

# Delisted stocks
ts.set_token('############################################################')
pro = ts.pro_api()
delist_data = pro.stock_basic(exchange='', list_status='D',
                              fields='ts_code,symbol,name,fullname,delist_date')
delist_data.to_csv('./delist_data.csv', index=False)

# Listed stocks
list_data = pro.stock_basic(exchange='', list_status='L',
                            fields='ts_code,symbol,name,fullname,delist_date')

# Financially distressed (ST) stocks
st_data = list_data.drop([i for i in range(len(list_data)) if 'ST' not in list_data['name'][i]])
st_data.to_csv('./st_data.csv', index=False)

# Randomly pick the same number of non-distressed stocks
normal_data = list_data.drop([i for i in range(len(list_data)) if 'ST' in list_data['name'][i]])
import matplotlib as mpl
import matplotlib.pyplot as plt
import tushare as ts
# pip install mplfinance
from mplfinance.original_flavor import candlestick_ohlc
from matplotlib.pylab import date2num
from glob import glob
from os import path
from datetime import timedelta

# ggplot looks a bit nicer
mpl.style.use("ggplot")

DATE_FORMAT = "%Y%m%d"
TS_TOKEN = "<your token>"
ts.set_token(TS_TOKEN)
pro = ts.pro_api(TS_TOKEN)


def ohlc_plot(df, ax=None):
    if ax is None:
        ax = plt.gca()
    data_lst = []
    for date, row in df.iterrows():
        t = date2num(date)
        data = (t, ) + tuple(row)
        data_lst.append(data)
    candlestick_ohlc(ax, data_lst,
# -*- coding: utf-8 -*-
"""
Created on Mon May 23 09:27:16 2016

@author: Administrator
"""
from sqlalchemy import create_engine
import datetime
import tushare as ts

ts.set_token('601be43bd14269103d558400372fc2a18d752d999d4e78a167335353abf79e8e')
engine = create_engine('mysql://*****:*****@127.0.0.1/tstest?charset=utf8')

# equ_info_A = ts.Equity().Equ(equTypeCD='A', field='')
# equ_info_B = ts.Equity().Equ(equTypeCD='B', field='')
# equ_info_A.to_sql('equ_info_a', con=engine, if_exists='replace', index=False, index_label='secID')
# equ_info_B.to_sql('equ_info_b', con=engine, if_exists='replace', index=False, index_label='secID')

today = datetime.date.today().strftime('%Y%m%d')
mkt = ts.Market().MktEqud(tradeDate=today, field='')
try:
    mkt.to_sql('mkt_price', con=engine, if_exists='append', index=False)
except Exception:
    pass
from datetime import datetime, timedelta
import pymongo
from pymongo.errors import ConnectionFailure
from time import time
from multiprocessing.pool import ThreadPool
import json
import os
import csv

from ctaBase import *
from vtConstant import *
from vtFunction import loadMongoSetting
from datayesClient import DatayesClient

import tushare as ts
ts.set_token('')

# Mapping between vn.trader and DataYes exchange codes
VT_TO_DATAYES_EXCHANGE = {}
VT_TO_DATAYES_EXCHANGE[EXCHANGE_CFFEX] = 'CCFX'   # CFFEX (China Financial Futures Exchange)
VT_TO_DATAYES_EXCHANGE[EXCHANGE_SHFE] = 'XSGE'    # SHFE (Shanghai Futures Exchange)
VT_TO_DATAYES_EXCHANGE[EXCHANGE_CZCE] = 'XZCE'    # CZCE (Zhengzhou Commodity Exchange)
VT_TO_DATAYES_EXCHANGE[EXCHANGE_DCE] = 'XDCE'     # DCE (Dalian Commodity Exchange)
DATAYES_TO_VT_EXCHANGE = {v: k for k, v in VT_TO_DATAYES_EXCHANGE.items()}


########################################################################
class HistoryDataEngine(object):
    """Historical data engine for the CTA module"""

    #----------------------------------------------------------------------
def __init__(self, parent=None):
    logging.debug("begin init")
    super(DatayesThread, self).__init__(parent)
    ts.set_token('edc37d879a4757aae38b00cf49cc2dffe936bf3efb0f700c3cbb1f798ec82d5d')
import tushare as ts

ts.set_token('577c33e4d462eba6c110d77e39408eaa08f6e91c7e2cb4275fad192e374b1ddb')
print('this is just a test')
st = ts.Market()
df = st.MktEqud(tradeDate='20151009',
                field='ticker,secShortName,preClosePrice,openPrice,highestPrice,lowestPrice,closePrice,turnoverVol,turnoverRate')
df['ticker'] = df['ticker'].map(lambda x: str(x).zfill(6))
df1 = st.MktEqud(tradeDate='20151008',
                 field='ticker,secShortName,preClosePrice,openPrice,highestPrice,lowestPrice,closePrice,turnoverVol,turnoverRate')
df1['ticker'] = df1['ticker'].map(lambda x: str(x).zfill(6))
good_stock = [0 for i in range(3000)]


def is_star(open_price, close_price, lowest_price, highest_price):
    # Returns 1 when the bar looks like a star: small body with shadows on both sides
    is_star = 0
    if open_price <= close_price:
        diff = (close_price - open_price) / open_price
        if diff < 0.03:
            if ((open_price / lowest_price) > 1.01) and ((highest_price / close_price) > 1.01):
                is_star = 1
    else:
        diff = (open_price - close_price) / close_price
        if diff < 0.03:
            if ((close_price / lowest_price) > 1.01) and ((highest_price / open_price) > 1.01):
                is_star = 1
    return is_star


i = 0
for index, row in df1.iterrows():
    openPrice = row['openPrice']
    closePrice = row['closePrice']
    lowestPrice = row['lowestPrice']
    highestPrice = row['highestPrice']
    if row['openPrice'] == 0:
    exp_norisk = 0.04 * (5.0 / 12.0)
    sharp_rate = (exp_portfolio - exp_norisk) / (std)
    return sharp_rate, std


if __name__ == '__main__':
    # Open the database connection, set the tushare token and define some initial parameters
    db = pymysql.connect(host='127.0.0.1', user='******', passwd='admin', db='stock', charset='utf8')
    cursor = db.cursor()
    ts.set_token(const.TUSHARE_TOKEN)
    pro = ts.pro_api()
    year = 2018
    date_seq_start = str(year) + '-03-01'
    date_seq_end = str(year) + '-03-10'
    stock_pool = [
        '603912.SH', '300666.SZ', '300618.SZ', '002049.SZ', '300672.SZ'
    ]
    # Clear the previous test records and create the intermediate tables
    sql_wash1 = 'delete from my_capital where seq != 1'
    cursor.execute(sql_wash1)
    db.commit()
    sql_wash3 = 'truncate table my_stock_pool'
    cursor.execute(sql_wash3)
    db.commit()
def __init__(self, *args, **kwargs):
    config = ReadConfig().getConfig()
    self._token = config['tushare']['token']
    ts.set_token(self._token)
    self.tusharePro = ts.pro_api()
import tushare as ts

ts.set_token('af3678578bf2abfcbcd2e98c1825e665c64c44ebe51332e561203abe3a993d96')
# update tushare database
import sys
import time
from os import path

sys.path.append('/Users/linhua/PycharmProjects/Fupan')
from private import tushare_token

import tushare as ts
import sqlite3
import pandas as pd

# prepare Tushare data interface
ts.set_token(tushare_token.tushare_token)
pro = ts.pro_api()

#
# Database design:
# table: concept_list
#   | concept_code | concept_name | src |
# table: concept_dashboard
#   | concept_name (1) | concept_name (2) |
#


def update_concept_database(dpath):
    concept_list_df = pro.concept()
    concept_dashboard_code = pd.DataFrame()
    concept_dashboard_name = pd.DataFrame()
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 9 23:17:43 2019

@author: 骨灰盒
"""
import tushare as ts
import numpy as np
import time

ts.set_token('7d6ac63764d82662169992641f582aab920fc6a0564f3147a773e8bf')
pro = ts.pro_api()
TODAY = time.strftime("%Y%m%d", time.localtime())
START_DATE = '20190801'

# ------------------------------------------------------------------------------
codes = pro.query('stock_basic', exchange='', list_status='L', market='',
                  fields='ts_code,symbol,name,area,industry,list_date')
my_codes = codes.ts_code
my_codes.to_csv('code_list.csv')
my_codes = my_codes[0:3]

for code in my_codes:
    # ---------------------------- strategy section -----------------------------
    try:
        tmp_df = pro.query(
            'daily',