Example 1
def job_7():
    try:
        print("I'm working......龙虎榜数据")
        # daily top list (龙虎榜)
        top_list = ts.top_list(today)
        data = pd.DataFrame(top_list)
        data.to_sql('top_list',engine,index=True,if_exists='replace')
        print("每日龙虎榜列表......done")

        # per-stock top-list statistics
        cap_tops = ts.cap_tops()
        data = pd.DataFrame(cap_tops)
        data.to_sql('cap_tops',engine,index=True,if_exists='replace')
        print("个股上榜统计......done")

        # brokerage branch top-list statistics
        broker_tops = ts.broker_tops()
        data = pd.DataFrame(broker_tops)
        data.to_sql('broker_tops',engine,index=True,if_exists='replace')
        print("营业部上榜统计......done")

        # institutional seat tracking
        inst_tops = ts.inst_tops()
        data = pd.DataFrame(inst_tops)
        data.to_sql('inst_tops',engine,index=True,if_exists='replace')
        print("机构席位追踪......done")

        # institutional trade details
        inst_detail = ts.inst_detail()
        data = pd.DataFrame(inst_detail)
        data.to_sql('inst_detail',engine,index=True,if_exists='replace')
        print("机构成交明细......done")
    except Exception as e:
        print(e)
Example 2
def get_broker_tops():
    df = ts.broker_tops()
    df.to_sql('broker_tops',
              engine,
              if_exists='append',
              index=False)
Example 3
 def store_top_data(self, trading_date=None):
     """Top-list (龙虎榜) data: the top-list API provides historical data on stocks that appeared on the daily top list."""
     trading_date = self.last_trading_date if trading_date is None else trading_date
     # daily top list
     print('top_list...')
     top_df = ts.top_list(self.stock_date_format(trading_date))
     self.mysqlUtils.append_data(top_df, 'top_list')
     # per-stock top-list statistics
     print('cap_tops...')
     cap_tops_df = ts.cap_tops()
     cap_tops_df['date'] = trading_date
     self.mysqlUtils.append_data(cap_tops_df, 'cap_tops')
     # brokerage branch top-list statistics
     print('broker_tops...')
     broker_tops_df = ts.broker_tops()
     broker_tops_df['date'] = trading_date
     self.mysqlUtils.append_data(broker_tops_df, 'broker_tops')
     # institutional seat tracking on the top list
     print('inst_tops...')
     inst_tops_df = ts.inst_tops()
     inst_tops_df['date'] = trading_date
     self.mysqlUtils.append_data(inst_tops_df, 'inst_tops')
     # institutional seat trade details on the top list
     print('inst_detail...')
     inst_detail_df = ts.inst_detail()
     self.mysqlUtils.append_data(inst_detail_df, 'inst_detail')
Example 4
def broker(days=5):
    b = ts.broker_tops()
    print(b)

    date_str = str(date.today())
    #b.to_excel("C:\\Users\\enhua\\Desktop\\python_script\\broker(" + date_str + ").xlsx", sheet_name=date_str)

    return b
Example 5
def broker_tops(days, retry_count, pause):
    try:
        df = ts.broker_tops(days, retry_count, pause)
        engine = create_engine('mysql://*****:*****@127.0.0.1/stock?charset=utf8')
        df.to_sql('broker_tops', engine, if_exists='append')
        print("message")
    except Exception as e:
        print(e)
Example 6
def getdragontigerdata():
    curday = datetime.date.today()
    curdate = curday.strftime('%Y%m%d')
    print(curdate)

    mylogger = getmylogger()

    # daily top list (龙虎榜)
    df = ts.top_list(curday.strftime('%Y-%m-%d'))
    if df is not None:
        df['date'] = curdate
        tosql(df, 'toplistdata', "append", "每日龙虎榜数据", mylogger)
    else:
        mylogger.info("没有每日龙虎榜数据。")

    # per-stock top-list statistics
    for i in [5, 10, 30, 60]:
        df = ts.cap_tops(i)
        logmsg = "个股上榜数据" + "%d日:" % i
        if df is not None:
            df['date'] = curdate
            df['period'] = i
            tosql(df, 'captops', "append", logmsg, mylogger)
        else:
            mylogger.info("没有" + logmsg)

    # brokerage branch top-list statistics
    for i in [5, 10, 30, 60]:
        df = ts.broker_tops(i)
        logmsg = "营业部上榜数据" + "%d日:" % i
        if df is not None:
            df['date'] = curdate
            df['period'] = i
            tosql(df, 'brokertops', "append", logmsg, mylogger)
        else:
            mylogger.info("没有" + logmsg)

    # institutional seat tracking
    for i in [5, 10, 30, 60]:
        df = ts.inst_tops(i)
        logmsg = "机构席位追踪数据" + "%d日:" % i
        if df is not None:
            df['date'] = curdate
            df['period'] = i
            tosql(df, 'instops', "append", logmsg, mylogger)
        else:
            mylogger.info("没有" + logmsg)

    # institutional trade details
    df = ts.inst_detail()
    logmsg = "机构成交明细:"
    if df is not None:
        df['date'] = curdate
        tosql(df, 'instdetail', "append", logmsg, mylogger)
    else:
        mylogger.info("没有机构成交明细。")
Example 7
def after_market_close(context):
    log.info("hello world!")
    ret = ts.broker_tops()
    name_se = ret["broker"]
    for index, item in enumerate(name_se):
        if "绍兴证券营业部" in item \
        or "深圳益田路荣超" in item \
        or "西藏东方财富证券股份有限公司拉萨团结路第二证券营业部" in item:

            log.info(ret.iloc[index])
Example 8
def test():
    ts.get_sz50s()
    ts.get_hs300s()
    ts.get_zz500s()
    ts.realtime_boxoffice()
    ts.get_latest_news()
    ts.get_notices(tk)
    ts.guba_sina()
    ts.get_cpi()
    ts.get_ppi()
    ts.get_stock_basics()
    ts.get_concept_classified()
    ts.get_money_supply()
    ts.get_gold_and_foreign_reserves()
    ts.top_list()  # daily top list (龙虎榜)
    ts.cap_tops()  # per-stock top-list statistics
    ts.broker_tops()  # brokerage branch top-list statistics
    ts.inst_tops()  # institutional seat tracking statistics
    ts.inst_detail()
Example 9
def download_brokerage(time):
    periods = [5, 10, 30]
    for i in periods:
        Datas_b = ts.broker_tops(days=i)
        files_path = '../report/Brokerage/%s' % time

        if not os.path.exists(files_path):  # create the report directory if it does not exist
            os.mkdir(files_path)
        Datas_b.to_csv(files_path + '/%s_Brokerage%sD_csv.csv' % (time, i), encoding='gbk')
        with pd.ExcelWriter(files_path + '/%s_Brokerage%sD_xlx.xlsx' % (time, i)) as writer:
            Datas_b.to_excel(writer, sheet_name='Sheet1')
        print('\n%s %s营业厅数据 have been saved' % (time, i))
Example 10
def get_lbh_broker_tops(day):
    """
    Brokerage branch top-list statistics.
    Fetch each branch's appearance count and cumulative buy/sell amounts over the last 5, 10, 30 and 60 days.
    """
    df = ts.broker_tops(day)
    print(df)
    if df is not None:
        res = df.to_sql(lbh_broker_tops, engine, if_exists='replace')
        msg = 'ok' if res is None else res
        print('获取营业部近 {0} 日上榜次数、累积买卖等情况:{1}'.format(day, msg) + '\n')
    else:
        print('获取营业部近 {0} 日上榜次数、累积买卖等情况:{1}'.format(day, 'None') + '\n')
Example 11
def stk_broker_tops(days=5,table = 'stk_broker_tops_ma5'):
    print("\n插入数据:" + table)
    try:
        conn = connMysql().createconn()
        cur = conn.cursor()
        sql = 'truncate table ' + table + ';'
        cur.execute(sql)
        cur.close()
        conn.close()
        df = ts.broker_tops(days=days)
        df.to_sql(table,engine,if_exists='append')
    except Exception as e:
        print("\n收集营业部上榜统计失败", e)
Example 12
def get_broker_tops(days=5, retry_count=RETRY_COUNT, pause=PAUSE):
    """Brokerage branch top-list statistics."""

    logger.info('Begin get BrokerTops. Days is: %s.' % days)
    try:
        data_df = ts.broker_tops(days, retry_count, pause)
    except Exception as e:
        logger.exception('Error get BrokerTops. Days is: %s.' % days)
        return None
    else:
        data_dicts = []
        if data_df is None or data_df.empty:
            logger.warning('Empty get BrokerTops. Days is: %s.' % days)
        else:
            data_dicts = [{'broker': row[0], 'count': row[1],
                           'bamount': row[2], 'bcount': row[3], 'samount': row[4],
                           'scount': row[5], 'top3': row[6],
                           'days_type': days, 'insert_date': today_line}
                          for row in data_df.values]
            logger.info('Success get BrokerTops. Days is: %s.' % days)
        return data_dicts
Example 13
def top_type(top_type):
    today = datetime.datetime.today().strftime('%Y-%m-%d')
    if top_type == 'top_list':
        top_list = ts.top_list(today)
        if top_list is not None:
            top_list.to_sql('top_list', engine, if_exists='append')  # flavor argument dropped; modern pandas no longer accepts it
    elif top_type == 'cap_tops':
        cap_tops = ts.cap_tops()
        if cap_tops is not None:
            cap_tops['date'] = today
            cap_tops.to_sql('top_cap_tops', engine, if_exists='append')
    elif top_type == 'broker_tops':
        broker_tops = ts.broker_tops()
        if broker_tops is not None:
            broker_tops['date'] = today
            broker_tops.to_sql('top_broker_tops', engine, if_exists='append')
    elif top_type == 'inst_tops':
        inst_tops = ts.inst_tops()
        if inst_tops is not None:
            inst_tops['date'] = today
            inst_tops.to_sql('top_inst_tops', engine, if_exists='append')
    elif top_type == 'inst_detail':
        inst_detail = ts.inst_detail()
        if inst_detail is not None:
            inst_detail.to_sql('top_inst_detail', engine, if_exists='append')
Example 14
def download_brokerage(time):
    periods = [5, 10, 30]
    for i in periods:
        try:
            Datas_b = ts.broker_tops(days=i)
        except:
            continue
        files_path = '../report/Brokerage/%s' % time

        if not os.path.exists(files_path):  # create the report directory if it does not exist
            os.makedirs(files_path)
            print(files_path)
        if Datas_b is None:  # data could not be downloaded from the web
            continue
        Datas_b.to_csv(files_path + '/%s_Brokerage%sD_csv.csv' % (time, i),
                       encoding='gbk')
        with pd.ExcelWriter(files_path + '/%s_Brokerage%sD_xlx.xlsx' %
                            (time, i)) as writer:
            Datas_b.to_excel(writer, sheet_name='Sheet1')
        print('\n%s %s营业厅数据 have been saved' % (time, i))
Example 15
def GetInstitutionCountOnBillBoardIn(days):
    """
    Args:
        days: days should be 5,10,30 or 60
    """
    return ts.broker_tops(days)
Example 16
days: statistics window of 5, 10, 30 or 60 days; default 5
retry_count: number of retries after a network error; default 3
pause: seconds to pause between retries; default 0
Return value columns:

broker: brokerage branch name
count: number of top-list appearances
bamount: cumulative buy amount (10k CNY)
bcount: number of buying seats
samount: cumulative sell amount (10k CNY)
scount: number of selling seats
top3: top three stocks bought
'''
print('营业部上榜统计')
print(ts.broker_tops())
'''Cumulative buy/sell counts and amounts for institutional seats over the last 5, 10, 30 and 60 days.

Parameter description:

days: statistics window of 5, 10, 30 or 60 days; default 5
retry_count: number of retries after a network error; default 3
pause: seconds to pause between retries; default 0
Return value columns:

code: stock code
name: stock name
bamount: cumulative buy amount (10k CNY)
bcount: number of buys
samount: cumulative sell amount (10k CNY)
scount: number of sells
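The two docstring fragments above describe the parameters and return columns of ts.broker_tops() and ts.inst_tops(). A minimal sketch of how those documented pieces might fit together; the MySQL connection string and table names below are illustrative assumptions, not taken from any example here:

# Hedged sketch: fetch the 5-day brokerage and institutional top-list tables
# with the parameters documented above and append them to MySQL.
# The DSN and the table names are placeholders, not part of the tushare API.
import tushare as ts
from sqlalchemy import create_engine

engine = create_engine('mysql+pymysql://user:password@127.0.0.1/stock?charset=utf8')  # placeholder DSN

broker_df = ts.broker_tops(days=5, retry_count=3, pause=0)  # documented columns: broker, count, bamount, bcount, samount, scount, top3
inst_df = ts.inst_tops(days=5, retry_count=3, pause=0)      # documented columns: code, name, bamount, bcount, samount, scount

if broker_df is not None:
    broker_df.to_sql('broker_tops', engine, if_exists='append', index=False)
if inst_df is not None:
    inst_df.to_sql('inst_tops', engine, if_exists='append', index=False)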
Example 17
def broker_top(days=5, retry_count=5, pause=1):
    df = ts.broker_tops(days=days, retry_count=retry_count, pause=pause)
    return df
Example 18
         for idx in df.index:
             temp = df.loc[idx]
             sql = "insert into cap_tops(code,name,count,bamount,samount,net,bcount,scount) values(%s,%s,%s,%s,%s,%s,%s,%s)"
             param = (temp['code'],temp['name'],temp['count'],temp['bamount'],temp['samount'],temp['net'],temp['bcount'],temp['scount'])
             cursor.execute(sql, param)
             conn.commit()
 except:
     f=open("errors/"+today+".log",'a')
     traceback.print_exc(file=f)
     f.flush()
     f.close()
     
 # brokerage branch top-list statistics
 try:                
     df = idx = temp = sql = param = None
     df = ts.broker_tops()
     if df is not None:
         for idx in df.index:
             temp = df.loc[idx]
             sql = "insert into broker_tops(broker,count,bamount,bcount,samount,scount,top3) values(%s,%s,%s,%s,%s,%s,%s)"
             param = (temp['broker'],temp['count'],temp['bamount'],temp['bcount'],temp['samount'],temp['scount'],temp['top3'])
             cursor.execute(sql, param)
             conn.commit()
 except:
     f=open("errors/"+today+".log",'a')
     traceback.print_exc(file=f)
     f.flush()
     f.close()
 
 # institutional seat tracking
 try:                
Example 19
def get_td_broker_list(days=5, retry=3, pause=0):
    """
    Fetch brokerage branch buy/sell activity on the top list.
    """
    data = ts.broker_tops(days=days, retry_count=retry, pause=pause)
    return data
Example 20
def capture_stock_data():
    capture_date = datetime.datetime.now().strftime("%Y%m%d")
    save_dir = "/home/dandelion/stock_data/" + capture_date

    if not os.path.exists(save_dir):
        os.mkdir(save_dir)
        print("The save directory is created successfully!\n", save_dir)
    else:
        print("The save directory already exists!\n", save_dir)
    # ======================Daily Command================================================================
    # get the boxoffice data of the last day and save as csvfile named as the capture command
    ts.day_boxoffice().to_csv(
        save_dir + "/" + capture_date + "_day_boxoffice.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("day_boxoffice data capture completed!")

    # get the cinema data of the last day and save as csvfile named as the capture command
    ts.day_cinema().to_csv(
        save_dir + "/" + capture_date + "_day_cinema.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("day_cinema data capture completed!")

    ts.month_boxoffice().to_csv(
        save_dir + "/" + capture_date + "_month_boxoffice.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("month_boxoffice data capture completed!")

    ts.realtime_boxoffice().to_csv(
        save_dir + "/" + capture_date + "_realtime_boxoffice.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("realtime_boxoffice data capture completed!")

    # get the stock index data of the last day and save as csvfile named as the capture command
    ts.get_index().to_csv(
        save_dir + "/" + capture_date + "_get_index.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_index data capture completed!")

    # get the history cpi data and save as csvfile named as the capture command
    ts.get_cpi().to_csv(
        save_dir + "/" + capture_date + "_get_cpi.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_cpi data capture completed!")

    # get the history gdp data by year and save as csvfile named as the capture command
    ts.get_gdp_year().to_csv(
        save_dir + "/" + capture_date + "_get_gdp_year.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_gdp_year data capture completed!")

    # get today all stock data and save as csvfile named as the capture command
    # ts.get_today_all().to_csv(save_dir+'/'+capture_date+'_get_today_all.csv',header=True,sep=',',index=False)

    # get detail information of the top brokers today and save as csvfile named as the capture command
    ts.broker_tops().to_csv(
        save_dir + "/" + capture_date + "_broker_tops.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("broker_tops data capture completed!")

    # get detail information of the top stocks today and save as csvfile named as the capture command
    ts.cap_tops().to_csv(
        save_dir + "/" + capture_date + "_cap_tops.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("cap_tops data capture completed!")

    ts.get_area_classified().to_csv(
        save_dir + "/" + capture_date + "_get_area_classified.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_area_classified data capture completed!")

    # ts.get_balance_sheet(code='').to_csv(save_dir+'/'+capture_date+'_get_balance_sheet.csv',header=True,sep=',',index=False)
    # print('get_balance_sheet data capture completed!')

    # ts.get_cash_flow(code='').to_csv(save_dir+'/'+capture_date+'_get_cash_flow.csv',header=True,sep=',',index=False)
    # print('get_cash_flow data capture completed!')

    ts.get_day_all().to_csv(
        save_dir + "/" + capture_date + "_get_day_all.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_day_all data capture completed!")
    ts.get_cashflow_data(2018, 3).to_csv(
        save_dir + "/" + capture_date + "_get_cashflow_data.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_cashflow_data data capture completed!")
    ts.get_concept_classified().to_csv(
        save_dir + "/" + capture_date + "_get_concept_classified.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_concept_classified data capture completed!")
    ts.get_debtpaying_data(2018, 3).to_csv(
        save_dir + "/" + capture_date + "_get_debtpaying_data.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_debtpaying_data data capture completed!")
    ts.get_deposit_rate().to_csv(
        save_dir + "/" + capture_date + "_get_deposit_rate.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_deposit_rate data capture completed!")

    ts.get_gdp_contrib().to_csv(
        save_dir + "/" + capture_date + "_get_gdp_contrib.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_gdp_for().to_csv(
        save_dir + "/" + capture_date + "_get_gdp_for.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_gdp_pull().to_csv(
        save_dir + "/" + capture_date + "_get_gdp_pull.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_gdp_quarter().to_csv(
        save_dir + "/" + capture_date + "_get_gdp_quarter.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("get_gdp_ data capture completed!")
    # ts.get_gdp_year().to_csv(save_dir+'/'+capture_date+'_get_gdp_year.csv',header=True,sep=',',index=False)
    ts.get_gem_classified().to_csv(
        save_dir + "/" + capture_date + "_get_gem_classified.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_gold_and_foreign_reserves().to_csv(
        save_dir + "/" + capture_date + "_get_gold_and_foreign_reserves.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_growth_data(2018, 3).to_csv(
        save_dir + "/" + capture_date + "_get_growth_data.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_industry_classified().to_csv(
        save_dir + "/" + capture_date + "_get_industry_classified.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_hs300s().to_csv(
        save_dir + "/" + capture_date + "_get_hs300s.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_sz50s().to_csv(
        save_dir + "/" + capture_date + "_get_sz50s.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_zz500s().to_csv(
        save_dir + "/" + capture_date + "_get_zz500s.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_operation_data(2018, 3).to_csv(
        save_dir + "/" + capture_date + "_get_operation_data.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_stock_basics().to_csv(
        save_dir + "/" + capture_date + "_get_stock_basics.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.get_report_data(2018, 3).to_csv(
        save_dir + "/" + capture_date + "_get_report_data.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.inst_detail().to_csv(
        save_dir + "/" + capture_date + "_inst_detail.csv",
        header=True,
        sep=",",
        index=False,
    )
    ts.inst_tops().to_csv(
        save_dir + "/" + capture_date + "_inst_tops.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("inst_tops data capture completed!")
    ts.new_stocks().to_csv(
        save_dir + "/" + capture_date + "_new_stocks.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("new_stocks data capture completed!")
    ts.top_list().to_csv(
        save_dir + "/" + capture_date + "_top_list.csv",
        header=True,
        sep=",",
        index=False,
    )
    print("top_list data capture completed!")
Example 21
import pymongo
import json
import tushare as ts

#    ts.get_cash_flow: get the historical cash flow for a specific stock code.
# 600547:山东黄金
# 600362:江西铜业
# 600312:平高电气
# 600499:科达洁能
# 603993:洛阳钼业
db = "TopRanks"
coll = "BrokerTop"
date = '2017-12-22'
tops = 5

conn = pymongo.MongoClient('127.0.0.1', port=27017)
df = ts.broker_tops(tops)
# index data columns(X columns)
dicIndex = json.loads(df.to_json(orient='split'))
for i, ind in enumerate(dicIndex['index']):
    jsonstr = {
        '_id': date + "-" + dicIndex['data'][i][0] + "-" + str(tops),
        'Date': date,
        dicIndex['columns'][0]: dicIndex['data'][i][0],
        dicIndex['columns'][1]: dicIndex['data'][i][1],
        dicIndex['columns'][2]: dicIndex['data'][i][2],
        dicIndex['columns'][3]: dicIndex['data'][i][3],
        dicIndex['columns'][4]: dicIndex['data'][i][4],
        dicIndex['columns'][5]: dicIndex['data'][i][5],
        dicIndex['columns'][6]: dicIndex['data'][i][6]
    }
    try:
Example 22
from sqlalchemy import create_engine
import tushare as ts
#engine = create_engine('mysql+pymysql://root:[email protected]/packageing?charset=utf8')
engine = create_engine(
    'mysql+pymysql://root:[email protected]/gupiao?charset=utf8')

df = ts.inst_detail()
de = ts.inst_tops()
broker = ts.broker_tops()

#df.to_sql('inst_datail',engine)
#de.to_sql('inst_tops',engine)
#broker.to_sql('broker_tops',engine)

df.to_sql('inst_datail', engine, if_exists='append')
de.to_sql('inst_tops', engine, if_exists='append')
#broker.to_sql('broker_tops',engine,if_exists='append')
Example 23
    days: statistics window of 5, 10, 30 or 60 days; default 5
    retry_count: number of retries after a network error; default 3
    pause: seconds to pause between retries; default 0

Return value columns:

    broker: brokerage branch name
    count: number of top-list appearances
    bamount: cumulative buy amount (10k CNY)
    bcount: number of buying seats
    samount: cumulative sell amount (10k CNY)
    scount: number of selling seats
    top3: top three stocks bought
'''
ts.broker_tops()
# institutional seat tracking
'''
Cumulative buy/sell counts and amounts for institutional seats over the last 5, 10, 30 and 60 days.

Parameter description:

    days: statistics window of 5, 10, 30 or 60 days; default 5
    retry_count: number of retries after a network error; default 3
    pause: seconds to pause between retries; default 0

Return value columns:

    code: stock code
    name: stock name
    bamount: cumulative buy amount (10k CNY)
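Since the bamount and samount columns documented above are expressed in units of 10,000 CNY, here is a small hedged sketch of post-processing them; the scaled and net-buy columns are illustrative additions, not part of the tushare return values:

# Hedged sketch: scale the documented 10k-CNY amount columns to CNY and rank
# brokerage branches by net buying. The derived column names are illustrative only.
import tushare as ts

df = ts.broker_tops(days=5)
if df is not None:
    df['bamount_cny'] = df['bamount'] * 10000  # cumulative buy amount in CNY
    df['samount_cny'] = df['samount'] * 10000  # cumulative sell amount in CNY
    df['net_buy_cny'] = df['bamount_cny'] - df['samount_cny']
    print(df.sort_values('net_buy_cny', ascending=False).head(10))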
Example 24
import tushare as ts
from sqlalchemy import create_engine
token = ts.set_token('c723069dd4a25402d05ea6afad36da2937111adf012f8258abb5f7e05936e575')
engine = create_engine('mysql+pymysql://root:toor@localhost/pack?charset=utf8')

#df = ts.new_stocks()
#df.to_sql('new_stocks',engine,if_exists='append')

#holdings = ts.fund_holdings(2015,3)
#holdings.to_sql('fund_holdings',engine,if_exists='append')

#df = ts.get_industry_classified()
#df.to_sql('industry_classified',engine,if_exists='append')


#top = ts.top_list('2016-04-21')                                                         # daily top list (龙虎榜)
cap = ts.cap_tops()                                                                     # per-stock top-list statistics
broker = ts.broker_tops()                                                               # brokerage branch top-list statistics
detail = ts.inst_detail()                                                               # institutional trade details

#top.to_sql('top_list',engine,if_exists = 'append')
cap.to_sql('cap_tops',engine,if_exists = 'append')
broker.to_sql('broker_tops',engine,if_exists = 'append')
detail.to_sql('inst_detail',engine,if_exists = 'append')
Example 25
def lhb_broker():
    fs = ts.broker_tops()
    return fs
Example 26
File: tops.py Project: cnslyq/ts
	try:
		df = ts.cap_tops(freq)
		df['date'] = today
		df['freq'] = freq
		df = df.set_index('code', drop=True)
		df.to_sql('tops_stock', engine, if_exists='append')
		print()
		tsl.log("tops_stock done")
	except BaseException as e:
		print()
		print(e)
		tsl.log("tops_stock error")

	tsl.log("tops_broker start...")
	try:
		df = ts.broker_tops(freq)
		df['date'] = today
		df['freq'] = freq
		df = df.set_index('date', drop=True)
		df.to_sql('tops_broker', engine, if_exists='append')
		print()
		tsl.log("tops_broker done")
	except BaseException as e:
		print()
		print(e)
		tsl.log("tops_broker error")

	tsl.log("tops_inst_seat start...")
	try:
		df = ts.inst_tops(freq)
		df['date'] = today
Example 27
from sqlalchemy import create_engine
import tushare as ts
engine = create_engine('mysql+pymysql://root:[email protected]/packageing?charset=utf8')

df = ts.inst_detail()
de = ts.inst_tops()
broker = ts.broker_tops()

df.to_sql('inst_datail',engine,if_exists = 'append')
de.to_sql('inst_tops',engine,if_exists = 'append')
broker.to_sql('broker_tops',engine,if_exists = 'append')
Example 28
    def longhubang(self):

        df = ts.broker_tops(days=5)
        df_save = df.sort_values(by='count', ascending=False)
        df_save.to_excel("longhubang-10day.xlsx")
Example 29
 def getBrokerTops(self):
     file_name = 'broker_tops.csv'
     path = self.index + self.index_broker_tops + file_name
     data = ts.broker_tops()
     data.to_csv(path, encoding='utf-8')
     print(file_name)
Example 30
def Getyingyeshangbang():
    df = ts.broker_tops()
    df = df.to_json(force_ascii=False)
    print(df)
    return df