def historico_index(self, indice, status='Close'):
    """Fetch historical data for one or more indices.

    :param indice: list of index names to analyse
    :param status: column to extract: Close / Open / Low / High / Volume / Currency
    :return: dict keyed by index name (when ``self.json``) or a pandas DataFrame
    """
    if len(indice) > 1:
        dados = pd.DataFrame()
        for papel in indice:
            dados[papel] = inv.get_index_historical_data(
                papel, self.pais,
                from_date=self.data_inicial, to_date=self.data_final,
                as_json=False, order=self.ordem,
                interval=self.intervalo)[status]
        if self.json:
            data = dict()
            for coluna in dados.columns:
                # BUG FIX: the original used positional Series[int] indexing
                # (dados[colunas][infos]) which is deprecated and removed on
                # non-integer indexes in pandas 2/3; iterate the Series directly.
                data[coluna] = [
                    {"data": ts.strftime("%Y-%m-%d"), "preço": preco}
                    for ts, preco in dados[coluna].items()
                ]
            return data
        return dados
        # to_json -> json.dumps(limpando, ensure_ascii=False).encode('utf8')
    else:
        dados = pd.DataFrame()
        # At most one element here, so the last assignment wins as before.
        for papel in indice:
            dados = inv.get_index_historical_data(
                papel, self.pais,
                from_date=self.data_inicial, to_date=self.data_final,
                as_json=False, order=self.ordem,
                interval=self.intervalo)
        if self.json:
            stock_json = dict()
            dados['Date'] = dados.index.strftime("%Y-%m-%d")
            stock_json[indice[0]] = dados.to_dict('records')
            return stock_json
        return dados
def __init__(self):
    """Load IFIX (BM&FBOVESPA Real Estate) daily history on construction."""
    # Current date; used to build both bounds of the request window.
    self.data_atual = datetime.today()
    # NOTE(review): the from_date format string hard-codes the literal year
    # '2020', so the window always starts on today's day/month in 2020.
    # Presumably intentional ("since 2020") — confirm; the start date shifts
    # every day and 29/02 would produce an invalid date outside leap years.
    self.ifix = inv.get_index_historical_data(
        'BM&FBOVESPA Real Estate IFIX',
        country='brazil',
        from_date=self.data_atual.strftime('%d/%m/2020'),
        to_date=self.data_atual.strftime('%d/%m/%Y'))
def get_Index(countryname, assetname):
    """Download 2010-2021 daily history for *assetname* and save it as CSV
    under ``wd\\toto_data\\`` with a filesystem-safe file name."""
    history = investpy.get_index_historical_data(
        index=assetname,
        country=countryname,
        from_date='01/01/2010',
        to_date='01/01/2021')
    # Delete '.', '/', ' ' and '&' from the asset name in one pass.
    cleaned = assetname.translate(str.maketrans('', '', './ &'))
    history.to_csv(wd + '\\toto_data\\' + 'index-' + countryname + '-' + cleaned + '.csv')
def get_asset_data(asset_list: list, from_date: str, to_date: str,
                   asset_type: str, asset_df: pd.DataFrame) -> Tuple[list, pd.DataFrame]:
    """Download historical data for every asset in *asset_list*.

    Bonds, currencies, commodities and crypto need no country; ETFs, funds
    and indices additionally need one, looked up from *asset_df*.

    :param asset_list: asset names as known to investing.com
    :param from_date: 'dd/mm/yyyy' start date
    :param to_date: 'dd/mm/yyyy' end date
    :param asset_type: one of Bonds/Currencies/ETFs/Funds/Commodities/Indices/Crypto
    :param asset_df: lookup table for per-asset attributes (country)
    :return: (list of per-asset DataFrames, DataFrame of Close columns)
    :raises ValueError: for an unsupported *asset_type*
    """
    # Dispatch tables replace the original lambda-per-branch if/elif chain.
    no_country = {
        "Bonds": investpy.get_bond_historical_data,
        "Currencies": investpy.get_currency_cross_historical_data,
        "Commodities": investpy.get_commodity_historical_data,
        "Crypto": investpy.get_crypto_historical_data,
    }
    with_country = {
        "ETFs": investpy.get_etf_historical_data,
        "Funds": investpy.get_fund_historical_data,
        "Indices": investpy.get_index_historical_data,
    }
    df_list = []
    if asset_type in no_country:
        fetch = no_country[asset_type]
        for asset in asset_list:
            df_list.append(fetch(asset, from_date, to_date))
    elif asset_type in with_country:
        fetch = with_country[asset_type]
        for asset in asset_list:
            country = get_attribute_investing(asset_df, asset, 'country')
            # investpy signature here is (name, country, from_date, to_date).
            df_list.append(fetch(asset, country, from_date, to_date))
    else:
        # BUG FIX: an unknown type used to crash with UnboundLocalError on `func`.
        raise ValueError("Unsupported asset_type: {!r}".format(asset_type))
    close_df = pd.concat([df.Close for df in df_list], axis=1)
    close_df.columns = asset_list
    return df_list, close_df
def idx_data(index, country):
    """Return a one-column DataFrame of closing prices, named after *index*.

    Date bounds come from the module-level ``fromdate`` and ``tdy``.
    """
    closes = investpy.get_index_historical_data(
        index=index, country=country,
        from_date=fromdate, to_date=tdy)['Close']
    # Promote the Close Series to a DataFrame whose single column is the index name.
    return closes.to_frame(name=index)
def hist_data_india(name):
    """One year of Indian index closes as a single-column DataFrame.

    Date bounds come from the module-level ``oneyr`` and ``tdy``.
    """
    series = investpy.get_index_historical_data(
        index=name, country='India',
        from_date=oneyr, to_date=tdy)['Close']
    # Single column named after the requested index.
    return series.to_frame(name=name)
def ftse100(date1, date2):
    """OHLC history of the FTSE 100 between *date1* and *date2* (datetime-like)."""
    fmt = '%d/%m/%Y'
    frame = ivpy.get_index_historical_data(
        index='FTSE 100', country='united kingdom',
        from_date=date1.strftime(fmt), to_date=date2.strftime(fmt))
    # Keep the first four columns (OHLC) and prefix them with the market label.
    return frame.iloc[:, :4].rename(columns=lambda col: 'UK FTSE 100: ' + col)
def getProfitThroughIndex(date, amount, Index):
    """Profit (possibly negative) from investing *amount* in *Index* since *date*.

    :param date: 'dd/mm/yyyy' start date
    :param amount: invested amount
    :param Index: investing.com index name (United States)
    :return: gain/loss of the position, i.e. amount * (last Close / first Open) - amount
    """
    # BUG FIX: the original used '%d/%m/%y%y', which repeats the 2-digit year
    # (e.g. 2021 -> '21/06/2121') instead of the 4-digit '%Y' that investpy
    # expects for to_date.
    df = investpy.get_index_historical_data(
        index=Index, country='United States',
        from_date=date,
        to_date=datetime.today().strftime('%d/%m/%Y'))
    begin = df.first('1D')['Open'].item()
    end = df.last('1D')['Close'].item()
    return amount * (end / begin) - amount
def arca(date1, date2):
    """OHLC history of the ARCA Major Markets index between the two dates."""
    fmt = '%d/%m/%Y'
    frame = ivpy.get_index_historical_data(
        index='ARCA Major Markets', country='united states',
        from_date=date1.strftime(fmt), to_date=date2.strftime(fmt))
    # First four columns are OHLC; prefix them with the market label.
    first_four = frame.iloc[:, :4]
    return first_four.rename(columns=lambda col: 'US ARCA: ' + col)
def nasdaq(date1, date2):
    """OHLC history of the NASDAQ index between *date1* and *date2* (datetime-like)."""
    df = ivpy.get_index_historical_data(index='nasdaq',
                                        country='united states',
                                        from_date=date1.strftime('%d/%m/%Y'),
                                        to_date=date2.strftime('%d/%m/%Y'))
    df = df.iloc[:, :4]
    # BUG FIX: columns were mislabelled 'US SP500: ...' (copy-paste from an
    # S&P 500 helper) even though this function downloads the NASDAQ index.
    df = df.rename(lambda x: 'US NASDAQ: ' + x, axis='columns')
    return df
def test_get_index_historical_data_dow_jones():
    """Regression test for issue #254: index lookup must be case-insensitive."""
    # Same request twice, exact name first, then a mixed-case spelling.
    for spelling in ('Dow Jones US', 'dOw JoNeS UniTed StATEs'):
        frame = investpy.get_index_historical_data(index=spelling,
                                                   country='united states',
                                                   interval='Daily',
                                                   from_date='01/01/1996',
                                                   to_date='17/12/2020',
                                                   order='descending')
        assert frame.columns[0] == 'Open'
        assert len(frame) == 2669
def BenchmarkDataInvesting(bench, country):
    """Download full benchmark history and store it in table *bench* of DatabaseVB.db.

    :param bench: investing.com index name, also used as the SQL table name
    :param country: country the index is listed in
    """
    conn = sqlite3.connect('DatabaseVB.db')
    try:
        df = investpy.get_index_historical_data(
            index=bench,  # f'{bench}' was a no-op wrapper around a string
            country=country,
            from_date='01/01/2000',
            to_date=datetime.today().strftime("%d/%m/%Y"))
        df.reset_index(inplace=True)
        df.rename(columns={'Date': 'Datum'}, inplace=True)
        df.to_sql(bench, if_exists="replace", con=conn)
    finally:
        # BUG FIX: the original leaked the sqlite connection.
        conn.close()
def get_index_rates(data):
    """Download history for every index referenced in *data* and dump a CSV.

    Rows of *data* must expose ``Index`` (iterable of index names) and
    ``Country``; date bounds come from the module-level ``config``.
    Writes the combined frame (with an ``Idx`` label column) to
    ``historical_index.csv``.
    """
    frames = []
    for _, row in data.iterrows():
        for idx in row.Index:
            df = investpy.get_index_historical_data(
                index=idx,
                country=row.Country,
                from_date=config['earliest_date'],
                to_date=config['latest_date'])
            frames.append(df.assign(Idx=idx))
    # BUG FIX: DataFrame.append() was deprecated and removed in pandas 2.0;
    # collecting frames and concatenating once is also O(n) instead of O(n^2).
    final_df = pd.concat(frames) if frames else pd.DataFrame()
    final_df.to_csv("historical_index.csv")
def lessrisk_moregains(self, datei, datef, index_, qts):
    """Rank Brazilian common stocks ('...3' tickers) by mean monthly return
    above the *index_* benchmark divided by beta, and save the top *qts*
    as a table image (Rank.png).

    :param datei: start date ('dd/mm/yyyy')
    :param datef: end date ('dd/mm/yyyy')
    :param index_: benchmark index name on investing.com
    :param qts: number of stocks to show in the ranking table
    """
    # I don't know how stock symbols work in other countries, so the logic
    # below is limited to Brazil (B3 tickers).
    country_ = 'Brazil'
    symbols = inv.get_stocks(country=country_).symbol
    bench_hist = inv.get_index_historical_data(
        index=index_, country=country_,
        from_date=datei, to_date=datef, interval="Monthly").Open
    bench_hist_change = bench_hist[bench_hist != 0].pct_change().dropna()
    how_many_errors = 0  # counts how many unavailable assets
    rows = []
    for symb in symbols:
        # Only common stocks (tickers ending in '3').
        if symb[-1] != '3':
            continue
        # Some symbols listed by inv.get_stocks have no data available, so
        # probe each download and skip the ones that fail.
        try:
            asset_hist = inv.get_stock_historical_data(
                stock=symb, country=country_,
                from_date=datei, to_date=datef, interval="Monthly").Open
            asset_hist_change = asset_hist[asset_hist != 0].pct_change().dropna()
            # Align benchmark and asset on their common dates only.
            sort = pd.DataFrame({'benchmark': bench_hist_change,
                                 'asset': asset_hist_change}).dropna().reset_index()
        except Exception:
            # BUG FIX: was a bare `except:`; also `continue` here prevents the
            # original latent bug of reusing a stale `sort` from a previous loop.
            if how_many_errors < 30:
                how_many_errors += 1
                print("Sorry, but " + symb + " is not available, it will be excluded from the rank")
                print("How many unavailable:" + str(how_many_errors))
            if how_many_errors == 30:
                print("More than 30 assets unavailable, it could be a connection problem")
                how_many_errors += 1
            continue
        if len(sort.benchmark) != 0 and len(sort.asset) != 0:
            beta = line(sort.benchmark, sort.asset)[0]
            # Negative/zero beta assets are excluded from the ranking.
            if beta <= 0:
                continue
            abv_bench_mean = (sort.asset - sort.benchmark).mean()
            rows.append({'symb': symb,
                         'abv_bench_mean': str(round(abv_bench_mean * 100, 2)) + "%",
                         'beta': round(beta, 4),
                         'ratio': abv_bench_mean / beta})
    # BUG FIX: DataFrame.append() was removed in pandas 2.0 — build once from rows.
    my_rank = pd.DataFrame(rows, columns=['symb', 'abv_bench_mean', 'beta', 'ratio'])
    my_rank = my_rank.sort_values(by='ratio', ascending=False).head(qts)
    # BUG FIX: the positional-axis form drop('ratio', 1) was removed in pandas 2.0.
    my_rank = my_rank.drop(columns='ratio')
    fig, ax = plt.subplots(figsize=[10, 5])
    ax.set_title("Top " + str(qts) + " stocks with highest gains above "
                 + index_ + " index and lowest risks in " + country_)
    period_string = ("Period: " + datei + " to " + datef)
    ax.set_xlabel("Average monthly relative rentability(AMRR)="
                  + " Average gains above Benchmark(" + index_ + " Index)\n" + period_string)
    plt.xticks([])
    plt.yticks([])
    ax.table(bbox=[0, 0.1, 1, 0.8], rowLabels=range(1, qts + 1),
             colLabels=['Symbol', 'AMRR', 'Beta(risk)'],
             cellText=my_rank.values, loc='center', rowLoc='center', cellLoc='center')
    fig.savefig('Rank.png')
    plt.show()
def get_investing_price(self):
    """Populate ``self.investing_prices`` with daily history for the security.

    The investpy endpoint is picked from ``self.security.instrument_type``;
    on any failure (including an unknown type) an empty DataFrame is stored
    instead — best-effort by design.
    """
    try:
        instrument_nature = self.security.instrument_type
        asset_country = self.security.country
        investing_ticker = self.security.investing_ticker
        start_date = '01/01/{}'.format(self.start_year)
        end_date = '31/12/{}'.format(self.end_year)
        if instrument_nature == "STOCK":
            hist_prices = invest.get_stock_historical_data(
                stock=investing_ticker, country=asset_country,
                from_date=start_date, to_date=end_date).reset_index(drop=False)
        elif instrument_nature == "ETF":
            hist_prices = invest.get_etf_historical_data(
                etf=investing_ticker, country=asset_country,
                from_date=start_date, to_date=end_date).reset_index(drop=False)
        elif instrument_nature == "index":
            hist_prices = invest.get_index_historical_data(
                index=investing_ticker, country=asset_country,
                from_date=start_date, to_date=end_date).reset_index(drop=False)
        elif instrument_nature == "Curr":
            hist_prices = invest.currency_crosses.get_currency_cross_historical_data(
                currency_cross=investing_ticker,
                from_date=start_date, to_date=end_date).reset_index(drop=False)
        elif instrument_nature == 'BOND':
            hist_prices = invest.bonds.get_bond_historical_data(
                bond=investing_ticker,
                from_date=start_date, to_date=end_date).reset_index(drop=False)
        elif instrument_nature == 'commodity':
            # BUG FIX: the original called invest.commodity.get_bond_historical_data,
            # which does not exist (wrong submodule name and wrong function), so
            # commodities always fell into the empty-DataFrame fallback below.
            hist_prices = invest.commodities.get_commodity_historical_data(
                commodity=investing_ticker,
                from_date=start_date, to_date=end_date).reset_index(drop=False)
        # NOTE: an unrecognised instrument type leaves hist_prices unbound;
        # the resulting NameError is caught below, preserving the original
        # fallback behaviour.
        self.investing_prices = hist_prices
    except Exception:
        # Best-effort: store an empty frame on any retrieval failure.
        self.investing_prices = pd.DataFrame()
def _download_data(self, asset_type, symbol, name, country, from_date, max_date):
    """Fetch raw history from investpy for one asset.

    Which identifier (symbol vs name) is used, and whether a country is
    required, depends on the asset type — this mirrors the investpy API.

    :return: DataFrame of historical data between from_date and max_date
    :raises ValueError: for an unrecognised *asset_type*
    """
    if asset_type == "BOND":
        # For bonds, name == symbol.
        df = ip.get_bond_historical_data(symbol, from_date=from_date, to_date=max_date)
    elif asset_type == "CERT":
        df = ip.get_certificate_historical_data(name, country=country, from_date=from_date, to_date=max_date)
    elif asset_type == "CRYPTO":
        df = ip.get_crypto_historical_data(name, from_date=from_date, to_date=max_date)
    elif asset_type == "COMM":
        df = ip.get_commodity_historical_data(symbol, from_date=from_date, to_date=max_date)
    elif asset_type == "ETF":
        df = ip.get_etf_historical_data(name, country=country, from_date=from_date, to_date=max_date)
    elif asset_type == "FUND":
        df = ip.get_fund_historical_data(name, country=country, from_date=from_date, to_date=max_date)
    elif asset_type == "FX":
        df = ip.get_currency_cross_historical_data(symbol, from_date=from_date, to_date=max_date)
    elif asset_type == "INDEX":
        df = ip.get_index_historical_data(name, country=country, from_date=from_date, to_date=max_date)
    elif asset_type == "STOCK":
        df = ip.get_stock_historical_data(symbol, country=country, from_date=from_date, to_date=max_date)
    else:
        # ROBUSTNESS FIX: an unknown type used to crash with UnboundLocalError on df.
        raise ValueError("Unsupported asset_type: {!r}".format(asset_type))
    return df
def idx_data(index, country):
    """Roughly one year of closes for *index*, aligned to business days and
    forward-filled; returns a one-column DataFrame named after the index.

    Relies on the module-level ``mth`` (anchor month for the 1-year window).
    """
    today = date.today()  # hoisted: the original called date.today() repeatedly
    start1 = date(today.year - 1, mth, today.day)
    bdates = pd.DataFrame(index=pd.bdate_range(start=start1, end=today))
    bdates.index.name = 'Date'
    tdy = str(today.day) + '/' + str(today.month) + '/' + str(today.year)
    # (the unused start_ytd variable from the original was removed)
    start_1y = str(today.day) + '/' + str(mth) + '/' + str(today.year - 1)
    closes = investpy.get_index_historical_data(index=index,
                                                country=country,
                                                from_date=start_1y,
                                                to_date=tdy)['Close']
    # Align onto the business-day calendar and forward-fill holidays/gaps.
    df = bdates.join(pd.DataFrame(closes), on='Date').ffill()
    df.columns = [index]
    return df
def collect_data(self, start_date='01/01/2000',
                 end_date=datetime.date.today().strftime('%d/%m/%Y')):
    '''Downloads asset data for a given start and end time range.
    Stores the downloaded data as one CSV per ticker in save_directory.

    NOTE(review): the end_date default is evaluated once at import time, so a
    long-running process keeps the import-day date — confirm if intended.

    Args
    ----
    start_date: String
        Start date for collecting asset data. Format should be dd/mm/yyyy
    end_date: String
        End date for asset data collection period. Default value is set to today
    '''
    ticker_raw = 'ADANA-AEFES-ULKER-AKBNK-AKGRT-AKSA-ALARK-ALGYO-ALKIM-ANACM-ANHYT-ARCLK-ASELS-AYGAZ-BRISA-BRSAN-BTCIM-BUCIM-CEMTS-CIMSA-CLEBI-DEVA-DOHOL-ECILC-ECZYT-EGEEN-ENKAI-ERBOS-EREGL-FROTO-GARAN-GOODY-GSDHO-GUBRF-GUSGR-HEKTS-IPEKE-ISCTR-ISFIN-ISGYO-KARSN-KARTN-KCHOL-KERVT-KLMSN-KORDS-KRDMD-LOGO-MGROS-NETAS-NTHOL-OTKAR-PETKM-SAHOL-SASA-SISE-SKBNK-SODA-TATGD-TCELL-THYAO-TOASO-TRCAS-TRKCM-TSKB-TUPRS-VESTL-YATAS-AGHOL-YKBNK-ZOREN-KOZAA-TTRAK-DOAS-INDES-BIMAS-VAKBN-CCOLA-KAREL-TAVHL-HALKB-ISMEN-ALBRK-TKFEN-TTKOM-GOZDE-KOZAL-AKSEN-TRGYO-EKGYO-BIZIM-TMSN-HLGYO-PGSUS-ODAS-ISDMR-MAVI-ENJSA-MPARK-SOKM'
    # IDIOM FIX: split('-') already yields the list; the original copied it
    # element by element into a second list.
    ticker_list = ticker_raw.split('-')
    if 'BIST 100' not in ticker_list:
        ticker_list.insert(0, 'BIST 100')
    Data._check_directory(self.save_directory)
    downloaded = 0
    for ticker in ticker_list:  # unused enumerate index removed
        if ticker == 'BIST 100':
            # The BIST 100 index itself comes from investing.com.
            data = investpy.get_index_historical_data(index=ticker,
                                                      country='turkey',
                                                      from_date=start_date,
                                                      to_date=end_date,
                                                      as_json=False,
                                                      order='ascending')
            name = 'XU100'
        else:
            # Individual stocks come from Yahoo Finance ('.IS' suffix).
            data = yf.download(
                ticker + '.IS',
                start=datetime.datetime.strptime(start_date, '%d/%m/%Y'),
                end=datetime.datetime.strptime(end_date, '%d/%m/%Y'),
                interval='1d', threads=True, progress=False)
            name = ticker
        data.to_csv(self.save_directory + '/' + name + '.csv')
        downloaded += 1
        print('Downloaded: %d' % downloaded)
def get_data_from_investing(tickers, countries, years=15, t_type="index"):
    """Download daily Close history for each ticker into data/stock_dfs/.

    :param tickers: index/commodity names as known to investing.com
    :param countries: country per ticker (parallel to *tickers*)
    :param years: how many years back from today to request
    :param t_type: "index" or "commodity"; anything else prints a message
        and returns without downloading
    """
    if t_type != "index" and t_type != "commodity":
        print("Currently, only work with indexes&commodities from investing.com.")
        return
    if not os.path.exists('data/stock_dfs'):
        os.makedirs('data/stock_dfs')
    start = dt.datetime.now() - dt.timedelta(days=years * 365)
    end = dt.datetime.now()
    from_date = start.strftime("%d/%m/%Y")
    to_date = end.strftime("%d/%m/%Y")
    # CONSISTENCY FIX: the two branches previously duplicated the whole
    # download/post-processing body; merged into one loop.
    for i in trange(len(tickers), desc="Downloading from investing.com"):
        ticker = tickers[i]
        path = 'data/stock_dfs/{}.csv'.format(ticker)
        if t_type == "index":
            # Indices are cached: skip tickers whose CSV already exists.
            if os.path.exists(path):
                continue
            df = investpy.get_index_historical_data(index=ticker,
                                                    from_date=from_date,
                                                    to_date=to_date,
                                                    country=countries[i])
        else:
            # NOTE(review): commodities were re-downloaded even when the CSV
            # exists — behaviour preserved from the original; confirm if intended.
            df = investpy.get_commodity_historical_data(commodity=ticker,
                                                        from_date=from_date,
                                                        to_date=to_date,
                                                        country=countries[i])
        # Keep only the Close column, indexed by Date.
        df.reset_index(inplace=True)
        df.set_index("Date", inplace=True)
        df.drop(['High', 'Low', 'Open', 'Volume', 'Currency'], axis=1, inplace=True)
        df.to_csv(path)
######################################################################################################################## investpy.get_indices(country=None) list_index_us = investpy.get_indices(country="United States") list_index_us_search = list_index_us[list_index_us['full_name'].str.contains( "MSCI")] list_index_kr = investpy.get_indices(country="south korea") list_index_kr_search = list_index_kr[list_index_kr['full_name'].str.contains( "MSCI")] list_index_kr2 = list_index_kr.sort_values(by=['name']) # 미국 S&P 500 지수 (1979.12.26 부터) investpy_snp500 = investpy.get_index_historical_data(index="S&P 500", country="United States", from_date="30/01/1900", to_date=DD_END_DATE) investpy_snp500.to_pickle('./Market_Watch_Data/investpy_snp500.pkl') # 미국 MSCI US REIT 지수 (2015.01.12 부터) investpy_msci_us_reit = investpy.get_index_historical_data( index="MSCI US REIT", country="United States", from_date="30/01/1900", to_date=DD_END_DATE) investpy_msci_us_reit.to_pickle( './Market_Watch_Data/investpy_msci_us_reit.pkl') # KOSPI (1981.05.01 부터) investpy_kospi = investpy.get_index_historical_data(index="KOSPI", country="south korea",
df_msci1 = df_msci0[df_msci0["Date"] < pd.to_datetime( "20210401", errors='coerce', format='%Y%m%d')] # 필요한 변수만 추려내기 df_msci_em0 = df_msci1[[ "Date", "MXEF_Open", "MXEF_High", "MXEF_Low", "MXEF_Close" ]] df_msci_world0 = df_msci1[[ "Date", "MXWO_Open", "MXWO_High", "MXWO_Low", "MXWO_Close" ]] ######################################################################################################################## # investpy 패키지를 사용하여 MSCI 자료 업데이트 받기 df_msci_em_update = investpy.get_index_historical_data( index="MSCI Emerging Markets", country="world", from_date="01/04/2021", to_date=DD_END_DATE) df_msci_world_update = investpy.get_index_historical_data( index="MSCI World", country="world", from_date="01/04/2021", to_date=DD_END_DATE) df_msci_em_update.reset_index(level=0, inplace=True) df_msci_world_update.reset_index(level=0, inplace=True) # 변수명 바꾸기 df_msci_em_update.rename(columns={ "Open": "MXEF_Open", "High": "MXEF_High",
] thread = 4 thread = thread if thread < cpu_count() else cpu_count() pool = Pool(processes=thread) start = dt.datetime.strptime('30/12/2009', '%d/%m/%Y') end = dt.datetime.now() - dt.timedelta(days=1) from_date = start.strftime('%d/%m/%Y') to_date = end.strftime('%d/%m/%Y') # Scrape SET Hist Data for index in ['SET', 'SET TRI']: df = investpy.get_index_historical_data(index=index, country='thailand', from_date=from_date, to_date=to_date, as_json=False, order='ascending') df.drop(labels='Currency', axis=1, inplace=True) df[['Return']] = df[['Close']].pct_change() df = df.round(6) df.to_csv(dir_path + 'data/' + index + '.csv') # Scrape investing.com investing_filter = [] for symbol in df_stock.loc[:, 'Filename'].values.tolist(): if not os.path.isfile(dir_path + 'data/investing/{}.csv'.format(symbol)): investing_filter.append( df_stock[df_stock['Filename'] == symbol].index.values[0]) for f in os.listdir(dir_path + 'data/investing'):
def index(request):
    """Django view: scrape/aggregate live FX, index and equity data plus news
    headlines, persist rows to the Django models (currency, indexes, equities,
    fxnews, stocknews, eqnews), and render the 'news/index.html' dashboard.

    NOTE(review): performs live scraping and several external API calls on
    every request — slow and fragile; nesting below was reconstructed from a
    whitespace-mangled source, so verify against the original file.
    """
    # Accumulators for the FX (n/price/pnl/pnlper) and index (..2) scrape loops.
    n = ""
    price = 0.0
    pnl = 0.0
    pnlper = 0.0
    n2 = ""
    price2 = 0.0
    pnl2 = 0.0
    pnlper2 = 0.0
    # FX Live Data: scrape selected currency pairs from Yahoo Finance.
    res = requests.get('https://finance.yahoo.com/currencies')
    soup = bs4.BeautifulSoup(res.text, "lxml")
    tables = soup.select('table')
    for row in tables[0].find_all('tr'):
        for ch in row.find_all('td')[1:2]:
            if ((ch.text == 'EUR/USD') | (ch.text == 'EUR/GBP') |
                    (ch.text == 'USD/JPY') | (ch.text == 'AUD/USD') |
                    (ch.text == 'USD/CNY') | (ch.text == 'GBP/USD')):
                for cell in row.find_all('td')[1:2]:
                    n = cell.text
                for cell in row.find_all('td')[2:3]:
                    price = cell.text
                for cell in row.find_all('td')[3:4]:
                    pnl = cell.text
                for cell in row.find_all('td')[4:5]:
                    pnlper = cell.text
                # Strip the trailing '%' before converting to float.
                pnlper = float(pnlper[:len(pnlper) - 1])
                a = currency(name=n, price=price, pnl=pnl, pnlper=pnlper)
                a.save()
    query_results1 = currency.objects.all().order_by('-id')[:6]
    # FX News: headlines from fxstreet.com.
    head3 = []
    head4 = []
    imglink3 = []
    pairs = []
    context = []
    res2 = requests.get(
        'https://www.fxstreet.com/news?q=&hPP=17&idx=FxsIndexPro&p=0&dFR%5BTags%5D%5B0%5D=EURUSD&dFR%5BTags%5D%5B1%5D=GBPUSD'
    )
    soup = bs4.BeautifulSoup(res2.text, "lxml")
    link = soup.find_all(
        "div", {"class": "fxs_col editorialhighlight editorialhighlight_medium"})
    # Only keep headlines that contain a currency pair (a '/' in the title).
    if ((link[0].h3.a.text).find('/') > 0):
        head3.append(link[0].h3.a.text)
        context.append(link[0].div.p.text)
        imglink3.append(link[0].div.a.img['data-src'])
    if ((link[1].h3.a.text).find('/') > 0):
        head3.append(link[1].h3.a.text)
        context.append(link[1].div.p.text)
        imglink3.append(link[1].div.a.img['data-src'])
    # Other headlines from dailyfx.com.
    res5 = requests.get('https://www.dailyfx.com/market-news')
    soup5 = bs4.BeautifulSoup(res5.text, "lxml")
    link5 = soup5.find_all(
        "a", {"class": "dfx-articleListItem jsdfx-articleListItem d-flex mb-3"})
    for i in range(len(link5)):
        # Keep short pair-style headlines without '?' or 'Charts'.
        if ((len(link5[i].span.text) < 73) &
                ((link5[i].span.text).find('/') >= 0) &
                ((link5[i].span.text).find('?') < 0) &
                ((link5[i].span.text).find('Charts') < 0)):
            head4.append(link5[i].span.text)
    # Removing duplicates (dict preserves insertion order); keep the last 4.
    head4 = list(dict.fromkeys(head4))
    head4 = head4[len(head4) - 4:len(head4)]
    # Stock images paired positionally with the four kept headlines.
    img_url_fx = [
        'https://currencylive.com/news/wp-content/uploads/2019/08/100859-gbp-usd-pound-rises-may-brussels.jpg',
        'https://responsive.fxempire.com/cover/1845x1230/webp-lossy-70.q50/_fxempire_/2020/01/Pounds-British.jpg',
        'https://images.newindianexpress.com/uploads/user/imagelibrary/2019/9/25/w900X450/australian-2874029_960_720.jpg',
        'https://responsive.fxempire.com/cover/1845x1230/webp-lossy-70.q50/_fxempire_/2019/11/US-Dollars-Yen-Notes.jpg'
    ]
    for i in range(len(head4)):
        g = fxnews(headline=head4[i], imgurl=img_url_fx[i])
        g.save()
    query_results6 = fxnews.objects.all().order_by('-id')[:4]
    # Extract 'XXX/YYY' pair strings from the collected headlines.
    for i in range(len(head3)):
        if (head3[i].find('/') > 0):
            arr = head3[i].split('/')
            first = (arr[0])[len(arr[0]) - 3:len(arr[0])]
            second = arr[1][0:3]
            pairs.append(first + '/' + second)
    for i in range(len(head4)):
        if (head4[i].find('/') > 0):
            arr = head4[i].split('/')
            first = (arr[0])[len(arr[0]) - 3:len(arr[0])]
            second = arr[1][0:3]
            pairs.append(first + '/' + second)
    # The first two pairs drive the two FX graphs below.
    pairs[0].split('/')
    first = ((pairs[0].split('/'))[0])[:3]
    second = ((pairs[0].split('/'))[1])[0:3]
    pair1 = first + '/' + second
    pairs[0].split('/')
    third = ((pairs[1].split('/'))[0])[:3]
    fourth = ((pairs[1].split('/'))[1])[0:3]
    pair2 = third + '/' + fourth
    imglink3 = imglink3[0]
    fxcenter = head3[0]
    context = context[0]
    # Currency Graph: last 21 days of rates for the two pairs.
    start_date = date.today()
    end_date = start_date - timedelta(days=21)
    rates = []
    forexdates = []
    rates2 = []
    forexdates2 = []
    before = 0.0
    before2 = 0.0
    c = CurrencyRates()
    for i in range(0, 21):
        new = c.get_rate(first, second, end_date)
        # Only record days where the rate actually moved.
        if (before != new):
            rates.append(new)
            forexdates.append(end_date.strftime("%Y-%m-%d"))
            before = new
        end_date = end_date + timedelta(days=1)
    end_date = start_date - timedelta(days=21)
    for i in range(0, 21):
        new = c.get_rate(third, fourth, end_date)
        if (before2 != new):
            rates2.append(new)
            forexdates2.append(end_date.strftime("%Y-%m-%d"))
            before2 = new
        end_date = end_date + timedelta(days=1)
    # Index data: scrape world indices from Yahoo Finance.
    res3 = requests.get('https://finance.yahoo.com/world-indices')
    soup = bs4.BeautifulSoup(res3.text, "lxml")
    tables2 = soup.select('table')
    for row in tables2[0].find_all('tr'):
        for ch in row.find_all('td')[1:2]:
            if ((ch.text == 'S&P 500') |
                    (ch.text == 'Dow Jones Industrial Average') |
                    (ch.text == 'NASDAQ Composite') |
                    (ch.text == 'HANG SENG INDEX') |
                    (ch.text == 'Nikkei 225') |
                    (ch.text == 'S&P BSE SENSEX')):
                for cell in row.find_all('td')[1:2]:
                    # Shorten the two long display names.
                    if (ch.text == 'Dow Jones Industrial Average'):
                        n2 = 'Dow Jones'
                    elif (ch.text == 'S&P BSE SENSEX'):
                        n2 = 'BSE SENSEX'
                    else:
                        n2 = cell.text
                for cell in row.find_all('td')[2:3]:
                    price2 = cell.text
                for cell in row.find_all('td')[3:4]:
                    pnl2 = cell.text
                for cell in row.find_all('td')[4:5]:
                    pnlper2 = cell.text
                pnlper2 = float(pnlper2[:len(pnlper2) - 1])
                b = indexes(name=n2, price=price2, pnl=pnl2, pnlper=pnlper2)
                b.save()
    query_results2 = indexes.objects.all().order_by('-id')[:6]
    # Index Graph.
    # Dow Graph: last 21 days of Dow 30 closes via investpy.
    end = date.today()
    start = (end - timedelta(days=21))
    end = end.strftime('%d/%m/%Y')
    start = start.strftime('%d/%m/%Y')
    dataframe2 = investpy.get_index_historical_data(index='Dow 30',
                                                    country='united states',
                                                    from_date=start,
                                                    to_date=end)
    dprice = (round(dataframe2['Close'], 3))
    dow = list((round(dataframe2['Close'], 3)))
    dumvar2 = dprice.index
    dates = []
    for i in range(len(dprice)):
        dates.append(dumvar2[i].strftime('%d-%m-%Y'))
    # BSE Sensex graph (same shape as the Dow block above).
    end2 = date.today()
    start2 = (end2 - timedelta(days=21))
    end2 = end2.strftime('%d/%m/%Y')
    start2 = start2.strftime('%d/%m/%Y')
    dataframe = investpy.get_index_historical_data(index='BSE Sensex',
                                                   country='india',
                                                   from_date=start2,
                                                   to_date=end2)
    dprice2 = (round(dataframe['Close'], 3))
    bse = list((round(dataframe['Close'], 3)))
    dumvar = dprice2.index
    bsedates = []
    for i in range(len(dprice2)):
        bsedates.append(dumvar[i].strftime('%d-%m-%Y'))
    # Index News via NewsAPI.
    index_title = []
    index_description = []
    index_imgurl = []
    newsapi = NewsApiClient(api_key='3de8090563454aadbd116bb099718ded')
    string = ['Nifty', 'Sensex', 'S&P', 'Dow']
    y = re.compile('<[^>]+>')  # strips HTML tags from article descriptions
    x = date.today()
    z = x
    x = x - timedelta(days=1)
    for i in string:
        # Indian indices query today's news, US ones yesterday's.
        if i == 'Sensex' or i == 'Nifty':
            start_date = z
        else:
            start_date = x
        all_articles = newsapi.get_everything(
            q=i,
            from_param=start_date.strftime("%Y-%m-%d"),
            language="en",
            sort_by="relevancy",
            page_size=5)
        articles = all_articles['articles']
        for j in range(2):
            # Deduplicate titles and skip question-style headlines.
            if ((articles[j]['title'] not in index_title) &
                    (articles[j]['title'].find('?') == -1)):
                index_title.append(articles[j]['title'])
                index_description.append(y.sub('', articles[j]['description']))
                index_imgurl.append(articles[j]['urlToImage'])
            else:
                pass
    for i in range(1, 5):
        if index_imgurl[i] != None and index_title[
                i] != None and index_description[i] != None:
            f = stocknews(headline=index_title[i], imgurl=index_imgurl[i])
            f.save()
    # The first collected article becomes the centre feature of the page.
    index_centertitle = index_title[0]
    index_centerdescription = index_description[0]
    index_centerimgurl = index_imgurl[0]
    query_results3 = stocknews.objects.all().order_by('-id')[:4]
    # Equities Graph: 1-week change for US mega-caps via yfinance.
    end = date.today()
    start = end - timedelta(days=7)
    diff = []
    aapl = yf.download("AAPL", start, end)
    nflx = yf.download("NFLX", start, end)
    msft = yf.download("MSFT", start, end)
    amzn = yf.download("AMZN", start, end)
    tsla = yf.download("TSLA", start, end)
    fb = yf.download("FB", start, end)
    comp = [aapl, nflx, msft, amzn, tsla, fb]
    for i in comp:
        dprice = (round(i['Close'], 3))
        # Day-over-day change between the two most recent closes.
        diff.append(round(dprice[-1] - dprice[-2], 3))
    # Indian Equities (investpy).
    end3 = date.today()
    start3 = end3 - timedelta(days=7)
    diff3 = []
    end3 = end3.strftime('%d/%m/%Y')
    start3 = start3.strftime('%d/%m/%Y')
    tcs = investpy.get_stock_historical_data(stock='TCS', country='india',
                                             from_date=start3, to_date=end3)
    icbk = investpy.get_stock_historical_data(stock='ICBK', country='india',
                                              from_date=start3, to_date=end3)
    hdbk = investpy.get_stock_historical_data(stock='HDBK', country='india',
                                              from_date=start3, to_date=end3)
    infy = investpy.get_stock_historical_data(stock='INFY', country='india',
                                              from_date=start3, to_date=end3)
    lart = investpy.get_stock_historical_data(stock='LART', country='india',
                                              from_date=start3, to_date=end3)
    reli = investpy.get_stock_historical_data(stock='RELI', country='india',
                                              from_date=start3, to_date=end3)
    comp2 = [tcs, icbk, hdbk, infy, lart, reli]
    for i in comp2:
        ghe = (round(i['Close'], 3))
        diff3.append(round(ghe[-1] - ghe[-2], 3))
    # Equities data: scrape large caps from TradingView.
    res4 = requests.get(
        'https://www.tradingview.com/markets/stocks-usa/market-movers-large-cap/'
    )
    soup4 = bs4.BeautifulSoup(res4.text, "lxml")
    tables4 = soup4.select('table')
    rows4 = tables4[0].find_all('tr')
    for i in range(1, 50):
        cells4 = rows4[i].find_all('td')
        name4 = cells4[0].div.a.text
        price4 = cells4[1].text
        pnlper4 = cells4[2].text
        pnl4 = cells4[3].text
        pnlper4 = float(pnlper4[:len(pnlper4) - 1])
        if ((name4 == 'AAPL') | (name4 == 'MSFT') | (name4 == 'NFLX') |
                (name4 == 'AMZN') | (name4 == 'FB') | (name4 == 'TSLA')):
            d = equities(name=name4, price=price4, pnl=pnl4, pnlper=pnlper4)
            d.save()
    query_results4 = equities.objects.all().order_by('-id')[:6]
    # Equities news via NewsAPI.
    streq = ["Trending stocks"]
    headeq = []
    imgurleq = []
    for j in range(len(streq)):
        top_headlineseq = newsapi.get_everything(
            q=streq[j],
            from_param=(date.today() - timedelta(days=1)).strftime('%Y-%m-%d'),
            sort_by='relevancy',
            language='en',
        )
        articleseq = top_headlineseq['articles']
        for i in range(len(articleseq)):
            if (articleseq[i]['title'].find('Trending stocks') != -1):
                e = eqnews(headline=articleseq[i]['title'],
                           imgurl=articleseq[i]['urlToImage'])
                e.save()
    query_results5 = eqnews.objects.all().order_by('-id')[:4]
    # newsapi = NewsApiClient(api_key='082157f2d57c4560878f51cc05ace5ea')
    streq2 = ["Trending stocks"]
    for j in range(len(streq2)):
        top_headlineseq2 = newsapi.get_everything(
            q=streq2[j],
            from_param=(date.today() - timedelta(days=1)).strftime('%Y-%m-%d'),
            sort_by='relevancy',
            language='en',
        )
        articleseq2 = top_headlineseq2['articles']
        # Second article becomes the centre feature of the equities section.
        title_center_eq = articleseq2[1]['title']
        imgURL_center_eq = articleseq2[1]['urlToImage']
        desc_center_eq = articleseq2[1]['description']
        desc_center_eq = desc_center_eq.split(".")
        desc_center_eq = desc_center_eq[0]
    return render(
        request, 'news/index.html', {
            'query_results1': query_results1,
            'query_results2': query_results2,
            'query_results3': query_results3,
            'query_results4': query_results4,
            'query_results5': query_results5,
            'query_results6': query_results6,
            'fxcenter': fxcenter,
            'head3': head3,
            'head4': head4,
            'context': context,
            'imglink3': imglink3,
            'pairs': pairs,
            'dow': dow,
            'dates': dates,
            'rates': rates,
            'forexdates': forexdates,
            'pair1': pair1,
            'rates2': rates2,
            'forexdates2': forexdates2,
            'pair2': pair2,
            'diff': diff,
            'bse': bse,
            'bsedates': bsedates,
            'diff3': diff3,
            'headeq': headeq,
            'imgurleq': imgurleq,
            'index_centertitle': index_centertitle,
            'index_centerdescription': index_centerdescription,
            'index_centerimgurl': index_centerimgurl,
            'title_center_eq': title_center_eq,
            'imgURL_center_eq': imgURL_center_eq,
            'desc_center_eq': desc_center_eq,
            'img_url_fx': img_url_fx
        })
def test_investpy_indices():
    """
    This function checks that index data retrieval functions listed in investpy work properly.
    """
    # Listing helpers, with and without a country filter.
    for country in ('spain', None):
        investpy.get_indices(country=country)
        investpy.get_indices_list(country=country)

    # get_indices_dict: (country, columns, as_json) combinations.
    dict_cases = [
        (None, ['name', 'currency'], True),
        (None, ['name', 'currency'], False),
        ('spain', ['name', 'currency'], True),
        ('spain', ['name', 'currency'], False),
        ('spain', None, False),
    ]
    for country, columns, as_json in dict_cases:
        investpy.get_indices_dict(country=country, columns=columns,
                                  as_json=as_json)

    investpy.get_index_countries()

    # Recent and historical retrieval for every as_json/order combination.
    for as_json, order in ((True, 'ascending'), (False, 'ascending'),
                           (True, 'descending'), (False, 'descending')):
        investpy.get_index_recent_data(index='ibex 35', country='spain',
                                       as_json=as_json, order=order,
                                       interval='Daily')
        investpy.get_index_historical_data(index='ibex 35', country='spain',
                                           from_date='01/01/2018',
                                           to_date='01/01/2019',
                                           as_json=as_json, order=order,
                                           interval='Daily')

    # Index information as a DataFrame and as JSON.
    for as_json in (False, True):
        investpy.get_index_information(index='ibex 35', country='spain',
                                       as_json=as_json)

    # Market overview for two countries.
    for country, as_json, n_results in (('united states', False, 10),
                                        ('united kingdom', True, 10)):
        investpy.get_indices_overview(country=country, as_json=as_json,
                                      n_results=n_results)

    investpy.search_indices(by='name', value='ibex')
#k1=k[k.Symbol==k.Symbol.unique()[pos]].iloc[-lb:] automl = AutoML(mode='Compete', total_time_limit=600) k1 = investpy.search_quotes(text='Kotak NIFTY ETF', products=['etfs'], countries=['India'], n_results=2)[0] k1 = investpy.get_etf_historical_data('KOTAKNIFTY', country='India', from_date='01/01/2010', to_date='20/03/2021') #k1=investpy.search_quotes(text='AARTIIND',products=['stocks'],countries=['India'],n_results=2)[0].retrieve_historical_data(from_date='01/01/2019',to_date='07/12/2020') k2 = investpy.get_index_historical_data(index='India VIX', country='India', from_date='01/01/2010', to_date='20/03/2021') def slret(o, h, l, c, sl): hp = ((h - o) / o) * 100 if -hp < sl: return sl else: return (((o - c) / o) * 100) def slret_long(x): if x > 0: return 1 else:
connection = pymysql.connect(host="fumire.moe", user="******", password=f.readline().strip(), db="fumiremo_StockDB", charset="utf8", port=3306) cursor = connection.cursor(pymysql.cursors.DictCursor) for country in ["south korea", "japan", "united states"]: print(country) for _, row1 in investpy.get_indices(country=country).iterrows(): print("-", row1["name"]) try: for date, row2 in investpy.get_index_historical_data( index=row1["name"], country=country, from_date="01/01/1900", to_date=datetime.datetime.today().strftime("%d/%m/%Y"), order="descending").iterrows(): query = "SELECT * FROM `IndexData` WHERE `country` LIKE '%s' AND `Name` LIKE '%s' AND `Symbol` LIKE '%s' AND `Date` = '%s'" % ( country, row1["name"], row1["symbol"], date.date()) cursor.execute(query) if cursor.fetchall(): break query = "INSERT INTO `IndexData` (`IndexColumn`, `Country`, `Name`, `Symbol`, `Date`, `Open`, `High`, `Low`, `Close`, `Volume`, `Currency`) VALUES (NULL, '%s', '%s', '%s', '%s', '%f', '%f', '%f', '%f', '%d', '%s');" % ( country, row1["name"], row1["symbol"], date.date(), row2["Open"], row2["High"], row2["Low"], row2["Close"], row2["Volume"], row2["Currency"]) cursor.execute(query)
def test_investpy_indices():
    """
    Smoke-test the index data retrieval functions listed in investpy
    (legacy API variant taking a ``debug`` keyword); passes when no call raises.
    """

    # Country listings, filtered and unfiltered.
    for case in [{'country': 'spain'}, {'country': None}]:
        investpy.get_indices(country=case['country'])
        investpy.get_indices_list(country=case['country'])

    # Dict listings: column selection crossed with JSON/plain output.
    cases = [
        {'country': None, 'columns': ['name', 'currency'], 'as_json': True},
        {'country': None, 'columns': ['name', 'currency'], 'as_json': False},
        {'country': 'spain', 'columns': ['name', 'currency'], 'as_json': True},
        {'country': 'spain', 'columns': ['name', 'currency'], 'as_json': False},
        {'country': 'spain', 'columns': None, 'as_json': False},
    ]
    for case in cases:
        investpy.get_indices_dict(country=case['country'],
                                  columns=case['columns'],
                                  as_json=case['as_json'])

    investpy.get_index_countries()

    # Recent and historical retrieval across as_json/order/debug combinations.
    cases = [
        {'as_json': True, 'order': 'ascending', 'debug': False},
        {'as_json': False, 'order': 'ascending', 'debug': True},
        {'as_json': True, 'order': 'descending', 'debug': False},
        {'as_json': False, 'order': 'descending', 'debug': False},
    ]
    for case in cases:
        investpy.get_index_recent_data(index='ibex 35',
                                       country='spain',
                                       as_json=case['as_json'],
                                       order=case['order'],
                                       debug=case['debug'])
        investpy.get_index_historical_data(index='ibex 35',
                                           country='spain',
                                           from_date='01/01/2018',
                                           to_date='01/01/2019',
                                           as_json=case['as_json'],
                                           order=case['order'],
                                           debug=case['debug'])

    investpy.search_indices(by='name', value='ibex')
def get_indices(index_name, country, start, end):
    """
    Fetch daily historical data for a single index from Investing.com.

    :param index_name: name of the index as listed on Investing.com
    :param country: country the index belongs to
    :param start: start date, ``dd/mm/yyyy``
    :param end: end date, ``dd/mm/yyyy``
    :return: whatever investpy.get_index_historical_data returns
             (a pandas DataFrame of OHLC data)
    """
    return investpy.get_index_historical_data(index_name,
                                              country=country,
                                              from_date=start,
                                              to_date=end)
# Negative-path test: every call below is fed deliberately invalid arguments
# (wrong types, unknown countries/indices, malformed dates, reversed ranges)
# and is expected to raise; the bare `try/except: pass` blocks are intentional
# here — the test only verifies that bad input never returns silently wrong
# data without erroring.
# NOTE(review): this is a whitespace-collapsed paste spanning three physical
# lines; kept byte-identical because the original indentation was lost and a
# reformat cannot be verified against the missing layout.
def test_indices_errors(): """ This function raises errors on index retrieval functions """ try: retrieve_indices(test_mode=None) except: pass try: retrieve_index_countries(test_mode=None) except: pass try: retrieve_global_indices_countries(test_mode=None) except: pass params = [ { 'country': ['error'] }, { 'country': 'error' }, ] for param in params: try: investpy.get_indices(country=param['country']) except: pass try: investpy.get_indices_list(country=param['country']) except: pass params = [ { 'country': ['error'], 'columns': None, 'as_json': False }, { 'country': 'spain', 'columns': None, 'as_json': 'error' }, { 'country': 'spain', 'columns': 0, 'as_json': True }, { 'country': 'spain', 'columns': ['error'], 'as_json': False }, ] for param in params: try: investpy.get_indices_dict(country=param['country'], columns=param['columns'], as_json=param['as_json']) except: pass params = [ { 'index': None, 'country': 'spain', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': ['error'], 'country': 'spain', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': None, 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'error', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'netherlands', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': ['error'], 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'as_json': 'error', 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'as_json': True, 'order': 'error', 'debug': True }, { 'index': 'error', 'country': 'spain', 'as_json': True, 'order': 'ascending', 'debug': True }, { 'index': ['error'], 'country': 'spain', 'as_json': True, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'as_json': True, 'order': 'ascending', 'debug': 'error' }, ] for param in 
params: try: investpy.get_index_recent_data(index=param['index'], country=param['country'], as_json=param['as_json'], order=param['order'], debug=param['debug']) except: pass params = [ { 'index': None, 'country': 'spain', 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'error', 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'netherlands', 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': None, 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': ['error'], 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': 'error', 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'error', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'from_date': 'error', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'from_date': '01/01/2019', 'to_date': 'error', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'error', 'country': 'spain', 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': ['error'], 'country': 'spain', 'from_date': '01/01/2018', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'from_date': '01/01/1998', 'to_date': '01/01/2019', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 
'ibex 35', 'country': 'spain', 'from_date': '01/01/2019', 'to_date': '01/01/1998', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'from_date': '01/01/1900', 'to_date': '01/01/1950', 'as_json': False, 'order': 'ascending', 'debug': True }, { 'index': 'ibex 35', 'country': 'spain', 'from_date': '01/01/2019', 'to_date': '01/03/2019', 'as_json': True, 'order': 'ascending', 'debug': 'error' }, ] for param in params: try: investpy.get_index_historical_data(index=param['index'], country=param['country'], from_date=param['from_date'], to_date=param['to_date'], as_json=param['as_json'], order=param['order'], debug=param['debug']) except: pass params = [ { 'by': None, 'value': 'ibex', }, { 'by': ['error'], 'value': 'ibex', }, { 'by': 'error', 'value': 'ibex', }, { 'by': 'name', 'value': None, }, { 'by': 'name', 'value': ['error'], }, { 'by': 'name', 'value': 'error', }, ] for param in params: try: investpy.search_indices(by=param['by'], value=param['value']) except: pass
# investing.com
# pip install investpy==0.9.7
import investpy
import pandas as pd  # FIX: pd was used below (pd.ExcelWriter) but never imported

# Quick smoke of stock retrieval for BBVA (Spain).
df = investpy.get_stock_recent_data(stock='BBVA', country='spain')
print(df.head())

df = investpy.get_stock_historical_data(stock='BBVA',
                                        country='spain',
                                        from_date='01/01/2010',
                                        to_date='01/01/2019')
print(df.head())

# NOTE(review): get_indices_historical_data (plural) does not exist in
# investpy 0.9.7 as pinned above — presumably get_index_historical_data was
# intended; verify against the installed investpy version.
df = investpy.get_indices_historical_data(indices='ise-100',
                                          country='turkey',
                                          from_date='01/01/2010',
                                          to_date='01/01/2019')
print(df.head())

# Export the full BIST 100 history (ascending by date) to an Excel workbook.
BIST100 = investpy.get_index_historical_data(index='BIST 100',
                                             country='turkey',
                                             from_date='01/01/2001',
                                             to_date='27/10/2019',
                                             order='ascending')
writer = pd.ExcelWriter('BIST100.xlsx', engine='xlsxwriter')
BIST100.to_excel(writer, 'Sheet1')
writer.save()