def loadDomesticIndex(self, name, beforeStr, endStr):
    """Crawl a domestic index from Naver and return it as a DataFrame.

    name      -- index identifier handed to the Naver crawler
    beforeStr -- start-date string for the crawl range
    endStr    -- end-date string for the crawl range

    Returns a DataFrame whose columns are the raw Korean-labelled
    price fields and whose rows are keyed by each item's index().
    """
    date_range = NaverDate.create(startDate=beforeStr, endDate=endStr)
    crawler = NaverCrawler.create(targetName=name)
    rows = crawler.crawling(dateData=date_range)
    frame = pd.DataFrame(columns=['종가', '전일비', '등락률', '거래량', '거래대금'])
    for row in rows:
        frame.loc[row.index()] = row.value()
    return frame
def loadDomesticIndex(self, name, beforeStr, endStr):
    """Load a domestic index DataFrame, caching it in an HDF5 file.

    name      -- cache file path; the index code is the part before '_'
    beforeStr -- start-date string for the crawl range
    endStr    -- end-date string for the crawl range

    If ``name`` already exists on disk the cached frame is read back;
    otherwise the data is crawled from Naver and written to ``name``.
    """
    # Cache hit: short-circuit with the stored frame.
    if os.path.isfile(name):
        print(name, 'read...')
        return pd.read_hdf(name, key='df')

    print(name, 'collect...')
    date_range = NaverDate.create(startDate=beforeStr, endDate=endStr)
    crawler = NaverCrawler.create(targetName=name.split('_')[0])
    rows = crawler.crawling(dateData=date_range)
    frame = pd.DataFrame(columns=['종가', '전일비', '등락률', '거래량', '거래대금'])
    for row in rows:
        frame.loc[row.index()] = row.value()
    # Persist for the next call before returning.
    frame.to_hdf(name, key='df', mode='w')
    return frame
def showGraphK10KOSPI200(startDate='2019-01-02', endDate='2019-05-04'):
    """Plot the K10 index against KOSPI200, both rebased to 100.

    startDate / endDate -- crawl range for the KOSPI200 series.  These
    were previously hard-coded; they are now parameters whose defaults
    preserve the old behavior for existing callers.

    Side effect: draws a matplotlib figure (does not call plt.show()).
    """
    k10_price = k10FromDate()
    k10_info = makeK10()
    # Historical market cap = price * shares outstanding * floating ratio.
    k10_historical_mc = k10_price * k10_info['Outstanding'] * k10_info[
        'Floating']
    k10 = pd.DataFrame()
    k10['k10 Market Cap'] = k10_historical_mc.sum(axis=1)
    # Rebase so the first observation equals 100.  Use .iloc[0]: plain
    # [0] positional fallback on a label index is removed in pandas 2.x.
    k10['k10'] = k10['k10 Market Cap'] / k10['k10 Market Cap'].iloc[0] * 100

    crawler = NaverCrawler.create(targetName='KPI200')
    date = NaverDate.create(startDate=startDate, endDate=endDate)
    kospi200 = crawler.crawling(dateData=date)
    kospi200Close = {
        NaverDate.formatDate(item.date): item.close for item in kospi200
    }
    k200 = pd.DataFrame({'K200': kospi200Close})

    plt.figure(figsize=(10, 5))
    plt.plot(k10['k10'])
    # KOSPI200 rebased the same way so the two series are comparable.
    plt.plot(k200['K200'] / k200['K200'].iloc[0] * 100)
    plt.legend(loc=0)
    plt.grid(True, color='0.7', linestyle=':', linewidth=1)
def makeDataFrame(startDate='2019-01-02', endDate='2019-05-04'):
    """Build a DataFrame of S&P500 and KOSPI200 closing prices.

    startDate / endDate -- crawl range for both series.  Previously
    hard-coded; kept as defaults so existing zero-argument callers are
    unaffected.

    Returns a DataFrame with columns 'S&P500' and 'KOSPI200', indexed
    by formatted date, with missing market days filled.
    """
    date = NaverDate.create(startDate=startDate, endDate=endDate)
    crawler = NaverCrawler.create(targetName='KPI200')
    kospi200 = crawler.crawling(dateData=date)
    kospi200Close = {
        NaverDate.formatDate(item.date): item.close for item in kospi200
    }

    worldDate = NaverDate.create(startDate=startDate, endDate=endDate)
    worldCrawler = NaverWorldCrawler.create(targetName='SPI@SPX')
    sp500 = worldCrawler.crawling(dateData=worldDate)
    sp500Close = {
        NaverDate.formatDate(item.date): item.close for item in sp500
    }

    data = {'S&P500': sp500Close, 'KOSPI200': kospi200Close}
    df = pd.DataFrame(data)
    # Forward-fill holidays where only one market traded.
    # fillna(method=...) is deprecated (removed in pandas 3.x); the
    # dedicated ffill()/bfill() methods are the supported equivalent.
    df = df.ffill()
    if df.isnull().values.any():
        # Leading NaNs (before a series' first observation) can only
        # be back-filled.
        df = df.bfill()
    return df
# print('현금', restMoney) # print('total', restMoney + stockMoney) # print('수익률', (stockMoney + restMoney)/money ) moneydf = pd.DataFrame([[stockMoney + restMoney, stockMoney, restMoney]], index=[current], columns=['total', 'stock', 'rest']) moneySum = pd.concat([moneySum, moneydf]) money = stockMoney + restMoney printPd('##수익률', stockValue / beforeValue) printPd('##소유주식', stockWallet) printPd('##주식가격', moneyWallet) printPd('##Total', moneySum) # In[14]: 코스피 200 가져오기 crawler = NaverCrawler.create(targetName='KPI200') date = NaverDate.create(startDate=startDateStr, endDate=endDateStr) kospi200 = crawler.crawling(dateData=date) df = pd.DataFrame(columns=['종가', '전일비', '등락률', '거래량', '거래대금']) for v in kospi200: df.loc[v.index()] = v.value() df # In[15]: 날짜 normalize moneySum.index = moneySum.index.map(lambda dt: pd.to_datetime(dt.date())) # moneySum # df.loc[moneySum.index, :] # df.index # In[16]: 연평균 수익률 portfolio = moneySum['total'] / moneySum['total'].iloc[0]
# In[3]: test prices = dict() date = NaverDate.create(startDate='1997-06-01') crawler = NaverStockCrawler.create('035720', logging=True) data = crawler.crawling(date) prices['카카오'] = { pd.to_datetime(item.date, format='%Y-%m-%d'): item.close for item in data } topdf = pd.DataFrame(prices) topdf # crawler = NavarSearchCodeCrawler.create('KODEX') # data = crawler.crawling() # data # In[4]: test2 print('collect...') crawler = NaverCrawler.create(targetName='KOSPI') date = NaverDate.create(startDate='2007-01-01', endDate='2019-12-31') data = crawler.crawling(dateData=date) print(data) # In[5]: pbr test crawler = NaverPbrCrawler() data = crawler.crawling('005930') print(data) # In[6]: #%%