def updateStockToDB():
    """Create or incrementally update the per-stock OHLCV tables.

    Reads the S&P constituent list from the spx DB.  For each code,
    ``sql_data.readDB`` yields either the stored DataFrame or (presumably,
    when the table is missing) the bare code string -- TODO confirm.  New
    tables are created from a full Yahoo download; existing ones get the
    rows between their last stored date and yesterday appended.  Every
    saved frame is re-run through analysis.stock_analysis(20, 60, 120).
    """
    spx_db = '{}/datas/db/{}.db'.format(path, config.get('database', 'spx'))
    stocks_db = '{}/datas/db/{}.db'.format(path,
                                           config.get('database', 'stocks'))
    spx = sql_data.readDB(spx_db, 'spx')
    start = date.get_year_ago()
    end = date.now()
    yesterday = dt.datetime.strptime(date.get_yesterday(), '%Y-%m-%d')
    last_trad_day = date.get_current_day()
    stock_list = spx["code"].tolist()
    datas = sql_data.readDB(stocks_db, stock_list)
    # Update stocks, consuming the work list from the front.
    while datas:
        df = datas[0]
        if isinstance(df, str):
            # No stored table yet: create it from a full download.
            data = download(code=df, start=start, end=end, source="yahoo")
            if data is not None:
                data["code"] = df
                data = analysis.stock_analysis(data, 20, 60, 120)
                sql_data.dataToDB(stocks_db, data, df)
            datas.pop(0)
            continue
        last_day = dt.datetime.strptime(df['date'].values[-1], '%Y-%m-%d')
        code = df['code'].values[0]
        print(code, last_day, last_trad_day)
        if last_day < yesterday and last_trad_day != last_day:
            print('更新:', code)
            new_df = download(code=code, start=last_day, end=yesterday,
                              source='yahoo')
            # BUG FIX: download() can return None (the creation branch above
            # already guards for this); skip the stock instead of crashing
            # on new_df.index[-1].
            if new_df is None or len(new_df) == 0:
                datas.pop(0)
                continue
            last_trad_day = new_df.index[-1].to_pydatetime()
            # Stored data is stale -> merge the fresh rows into the table.
            new_df = new_df.reset_index()
            new_df.rename(
                columns={
                    "Date": "date",
                    "Open": "open",
                    "High": "high",
                    "Low": "low",
                    "Close": "close",
                    "Volume": "vol",
                },
                inplace=True,
            )
            new_df = new_df[~np.isnan(new_df["close"])]
            new_df.drop(['Adj Close'], inplace=True, axis=1)
            # Both sides must index on the same str dates for concat to
            # align correctly.
            new_df[["date"]] = new_df[["date"]].astype(str)
            final_data = pd.concat([
                df.set_index(["date"]).sort_index(),
                new_df.set_index(["date"]).sort_index()
            ])
            # On overlapping dates keep the freshly downloaded row.
            final_data = final_data[~final_data.index.duplicated(keep='last')]
            final_data['code'] = final_data['code'].values[0]
            final_data = analysis.stock_analysis(final_data, 20, 60, 120)
            # BUG FIX: the table name is the ticker string, not the whole
            # 'code' column (a Series) -- matches the creation path above.
            sql_data.dataToDB(stocks_db, final_data, code)
        datas.pop(0)
def updateMarketBreadthToDB():
    """Aggregate per-sector market breadth and persist it.

    For every S&P constituent, its daily `is_above_s_ma` flag is summed
    into the column of its `sp_sector`, producing one count-above-MA
    series per sector over the last three years, which is then passed
    through analysis.market_breadth_analysis and saved to the
    market-breadth DB.
    """
    # Read the constituent list from the spx DB.
    spx_db = '{}/datas/db/{}.db'.format(path, config.get('database', 'spx'))
    stocks_db = '{}/datas/db/{}.db'.format(path,
                                           config.get('database', 'stocks'))
    spx = sql_data.readDB(spx_db, 'spx')
    stock_list = spx["code"].tolist()
    datas = sql_data.readDB(stocks_db, stock_list)
    spx_info_data = sql_data.readDB(spx_db, 'spx')
    # One accumulator column per sector name.
    column_label = list(sp500_dict.keys())
    start = date.get_3year_ago()
    end = date.now()
    row_index = pd.date_range(start=start, end=end, freq='D')
    df = pd.DataFrame(0, index=row_index, columns=column_label)
    # Calendar scaffold taken from the FIRST stock's dates; NOTE(review):
    # assumes datas[0] is a DataFrame (readDB may return a bare code string
    # for a missing table elsewhere in this module) -- confirm.
    temp_df = pd.DataFrame(0, index=datas[0]['date'].tolist(),
                           columns=['temp'])
    temp_df.index = pd.to_datetime(temp_df.index)
    while datas:
        data = datas[0]
        data = data.set_index(["date"]).sort_index()
        data.index = pd.to_datetime(data.index)
        # Look up this stock's sector from the spx info table.
        industry = spx_info_data.loc[spx_info_data['code'] == data['code'].
                                     values[0]]['sp_sector'].values[0]
        # temp_df['is_above_s_ma'] = data['is_above_s_ma']
        # Outer-join against the scaffold so every stock contributes a value
        # (0 via fillna) on every scaffold date.
        data = pd.concat([temp_df, data], axis=1)
        data['is_above_s_ma'].fillna(0, inplace=True)
        # Index-aligned add into the sector's accumulator column.
        df[industry] += data['is_above_s_ma']
        datas.pop(0)
    # print(df.tail(15))
    df = analysis.market_breadth_analysis(spx_info_data, df)
    # Save to the market-breadth DB.
    market_breadth_db = '{}/datas/db/{}.db'.format(
        path, config.get('database', 'market_breadth'))
    sql_data.dataToDB(market_breadth_db, df,
                      config.get('tablename', 'market_breadth'))
import dash_html_components as html
import copy
import dash_table
import dash_bootstrap_components as dbc
import pandas as pd
import os
from datas import sql_data, symbol_data
from config.config import *

path = os.path.abspath('.')
# Column-label mapping for the breadth table; extend the sector dict with
# the two extra display columns.
col = symbol_data.sp500_dict
col['date'] = 'Date 日期'
col['total'] = 'Total 总数'
market_breadthDB = '{}/datas/db/{}.db'.format(
    path, config.get('database', 'market_breadth'))
df = sql_data.readDB(market_breadthDB,
                     config.get('tablename', 'market_breadth'))
# Normalize the date column to plain 'YYYY-MM-DD' strings for display.
df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
df['date'] = df.date.dt.strftime('%Y-%m-%d')


def table_style(df):
    """Build conditional-style entries for the breadth data table.

    NOTE(review): truncated in this view -- the loop body continues past
    the visible source.  Per-column scale `num` is 100 except 1100 for
    'total'; `t` steps through thresholds.  The loop variable `col`
    shadows the module-level `col` dict above -- confirm intentional.
    """
    styles = []
    for col in df.columns:
        num = 100
        t = 1
        if col == 'total':
            num = 1100
        x = copy.deepcopy(num)
        for j in range(1, 22):
            if t < 0.1:
                t = round(t + 0.09, 2)
import dash_html_components as html
import os
from datas import sql_data
from config.config import *
import pandas as pd

path = os.path.abspath('.')
# Metadata table for the financial series (Symbol / Introduc / ... columns).
dictDF = pd.read_csv(path + "/config/financial_data_dict.csv")

# Fed balance-sheet series symbols stored in the finance DB.
table_1 = 'TREAST'
table_2 = 'FEDDT'
table_3 = 'WSHOMCB'
table_4 = 'SWPT'

financeDB = '{}/datas/db/{}.db'.format(path,
                                       config.get('database', 'finance'))
treast = sql_data.readDB(financeDB, table_1)
feddt = sql_data.readDB(financeDB, table_2)
wshomcb = sql_data.readDB(financeDB, table_3)
swpt = sql_data.readDB(financeDB, table_4)

# Introductory text for each series, looked up by symbol.
info1 = dictDF.loc[dictDF['Symbol'] == table_1]['Introduc'].values[0]
info2 = dictDF.loc[dictDF['Symbol'] == table_2]['Introduc'].values[0]
info3 = dictDF.loc[dictDF['Symbol'] == table_3]['Introduc'].values[0]
info4 = dictDF.loc[dictDF['Symbol'] == table_4]['Introduc'].values[0]

# NOTE(review): `dcc` is not imported in the visible lines -- presumably
# provided by `from config.config import *`; confirm.
# (Truncated in this view -- the figure definition continues past the
# visible source.)
fed_assets_graph = dcc.Graph(
    figure=dict(
        data=[
            dict(
                x=treast['date'],
def updateFinanceToDB():
    """Create or incrementally update every financial series table.

    The series list and per-symbol data source come from
    config/financial_data_dict.csv (source is column index 5).  Symbols
    whose source is 'custom' have the form "A/B" and are derived as the
    ratio of two already-stored series.  VTI closes are downloaded once
    and passed to every analysis.updateFinanceData call (presumably as a
    benchmark series -- TODO confirm).
    """
    finance_db = '{}/datas/db/{}.db'.format(path,
                                            config.get('database', 'finance'))
    dict_data = pd.read_csv(path + "/config/financial_data_dict.csv")
    finance_list = dict_data["Symbol"].tolist()
    start = date.get_10year_ago()
    end = date.get_current_day()
    yesterday = dt.datetime.strptime(date.get_yesterday(), '%Y-%m-%d')
    last_trad_day = date.get_current_day()

    # Benchmark series: VTI closing prices with NaN rows dropped.
    vti = download('VTI', start=start, end=end)
    vti = vti.reset_index()
    vti.rename(
        columns={
            "Date": "date",
            "Open": "open",
            "High": "high",
            "Low": "low",
            "Close": "close",
            "Volume": "vol",
        },
        inplace=True,
    )
    vti['VTI'] = vti['close']
    vti = vti[~np.isnan(vti['VTI'])]

    while finance_list:
        symbol = finance_list[0]
        # Column index 5 of the dict CSV names this symbol's data source.
        data_source = dict_data[dict_data["Symbol"] == symbol].values[0][5]
        if data_source == '-' or symbol == '-':
            finance_list.pop(0)
            continue
        if data_source == 'custom':
            # Derived series "A/B": ratio of two stored series.
            symbol_names = symbol.split("/")
            data_1 = sql_data.readDB(finance_db, symbol_names[0])
            data_1.sort_index(inplace=True)
            data_2 = sql_data.readDB(finance_db, symbol_names[1])
            data_2.sort_index(inplace=True)
            data_1[symbol] = data_1[symbol_names[0]] / data_2[symbol_names[1]]
            data = analysis.updateFinanceData(symbol, vti, data_1)
            # BUG FIX: persist the analysed result rather than the raw
            # input frame -- every other branch saves updateFinanceData's
            # return value, and `data` was otherwise unused.
            sql_data.dataToDB(finance_db, data, symbol)
            finance_list.pop(0)
            continue
        # Existing table -> incremental update from its last stored date.
        old_data = sql_data.readDB(finance_db, symbol)
        if old_data is not None:
            # Stored dates appear either date-only or as full timestamps;
            # try the short format first. (Was a bare `except:`; strptime
            # format mismatches raise ValueError.)
            try:
                last_day = dt.datetime.strptime(old_data['date'].values[-1],
                                                '%Y-%m-%d')
            except ValueError:
                last_day = dt.datetime.strptime(old_data['date'].values[-1],
                                                '%Y-%m-%d %H:%M:%S')
            if last_day < yesterday:
                new_df = download(code=symbol, start=last_day, end=yesterday,
                                  source=data_source)
                # BUG FIX: guard against a failed/empty download before
                # dereferencing its index.
                if new_df is not None and len(new_df) > 0:
                    last_trad_day = new_df.index[-1].to_pydatetime()
                    if last_trad_day > last_day:
                        print('更新金融数据')
                        data = analysis.updateFinanceData(symbol, vti,
                                                          new_df, old_data)
                        sql_data.dataToDB(finance_db, data, symbol)
        # No stored table yet -> create it from a full-history download.
        else:
            print('创建金融数据')
            data = download(symbol, start=start, end=end, source=data_source)
            if data is not None:
                data = analysis.updateFinanceData(symbol, vti, data)
                sql_data.dataToDB(finance_db, data, symbol)
        print(symbol, data_source, last_trad_day)
        finance_list.pop(0)
        dbc.Tooltip('概念股 Market Driver', target="thematic-p",
                    placement="top"),
        dbc.Row(the)
    ]))
    # Industry section header with its hover tooltip.
    col_items.append(
        dbc.Col([
            dbc.Col(dbc.Alert(html.H4('Industry 行业板块'),
                              color="dark",
                              id='industry-p'),
                    width=4),
            dbc.Tooltip('行业 Market Driver', target="industry-p",
                        placement="right"),
            dbc.Row(ind)
        ]))
    return col_items


# Symbols flagged for display in the dict CSV.  NOTE(review): symbol_list2
# is an identical second copy -- presumably because readDB or list_item
# consumes one of the lists; confirm before deduplicating.
symbol_list = dictDF.loc[(dictDF['Display'] == 'y')
                         & (dictDF['Symbol'] != '-')]['Symbol'].tolist()
symbol_list2 = dictDF.loc[(dictDF['Display'] == 'y')
                          & (dictDF['Symbol'] != '-')]['Symbol'].tolist()
financeDB = '{}/datas/db/{}.db'.format(path,
                                       config.get('database', 'finance'))
# Last 30 rows of each displayed series (the -30 presumably limits rows --
# verify against readDB's signature).
datas = sql_data.readDB(financeDB, symbol_list, -30)
page = html.Div(
    # className="container",
    children=list_item(symbol_list2, dictDF, datas))
# Introductory text for each series, looked up by symbol (table_1..table_10
# and dictDF/financeDB are defined earlier in this file, outside this view).
introduc4 = dictDF.loc[dictDF['Symbol'] == table_4]['Introduc'].values[0]
introduc5 = dictDF.loc[dictDF['Symbol'] == table_5]['Introduc'].values[0]
introduc6 = dictDF.loc[dictDF['Symbol'] == table_6]['Introduc'].values[0]
introduc7 = dictDF.loc[dictDF['Symbol'] == table_7]['Introduc'].values[0]
introduc8 = dictDF.loc[dictDF['Symbol'] == table_8]['Introduc'].values[0]
introduc9 = dictDF.loc[dictDF['Symbol'] == table_9]['Introduc'].values[0]
introduc10 = dictDF.loc[dictDF['Symbol'] == table_10]['Introduc'].values[0]

# Longer explanation text for selected series.
Explanation1 = dictDF.loc[dictDF['Symbol'] == table_1]['Explanation'].values[0]
Explanation2 = dictDF.loc[dictDF['Symbol'] == table_2]['Explanation'].values[0]
Explanation3 = dictDF.loc[dictDF['Symbol'] == table_3]['Explanation'].values[0]
Explanation9 = dictDF.loc[dictDF['Symbol'] == table_9]['Explanation'].values[0]
Explanation10 = dictDF.loc[dictDF['Symbol'] ==
                           table_10]['Explanation'].values[0]

# Unemployment claims data.
ccsa = sql_data.readDB(financeDB, table_1)
icsa = sql_data.readDB(financeDB, table_2)
# Weekly economic index.
wei = sql_data.readDB(financeDB, table_3)
# Treasury yields.
dgs10 = sql_data.readDB(financeDB, table_4)
dgs5 = sql_data.readDB(financeDB, table_5)
# Rates & CPI.
effr = sql_data.readDB(financeDB, table_6)
cpi = sql_data.readDB(financeDB, table_7)
us10YY = sql_data.readDB(financeDB, table_8)
cg = sql_data.readDB(financeDB, table_9)
og = sql_data.readDB(financeDB, table_10)

# CPI change series.  NOTE(review): the column is named 'CPI(YoY)' but
# .diff() is the difference between consecutive stored rows, not a
# year-over-year change -- confirm the series frequency / intent.
cpi['CPI(YoY)'] = cpi[table_7].diff()
cg.dropna(inplace=True)