def swiss_craw(conn):
    """Scrape CHF reference rates (vs USD and JPY) from the Swiss National Bank."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://www.snb.ch/en/iabout/stat/statrep/id/current_interest_exchange_rates#t3")
    soup = BeautifulSoup(response.text, "html.parser")
    containers = soup.select(".rates-values-item-container.col-md-6")
    # First four containers are skipped (non-FX entries on the page).
    rows = [[span.text for span in item.select("span")] for item in containers[4:]]
    for row in rows:
        code = row[1].split(' ')[1]
        if code == 'JPY':
            JPY = row[2]
        if code == 'USD':
            USD = row[2]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'CHF', USD, JPY, now)
def southafrica_craw(conn):
    """Scrape ZAR exchange rates from the South African Reserve Bank."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://wwwrs.resbank.co.za/webindicators/exchangerates.aspx")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".DataTable tr")]
    for row in rows[3:27]:
        label = row[0].replace(' ', '').split(':')[0]
        if label == 'RandperUSDollar':
            USD = row[1]
        if label == 'Japan':
            JPY = row[1]
    # Site quotes yen per rand; invert to get rand per yen.
    JPY = 1 / float(JPY)
    foreignbank_info(conn, 'ZAR', USD, JPY, now)
def hungary_craw(conn):
    """Fetch HUF middle rates for USD and JPY from OTP Bank's JSON API."""
    now = datetime.datetime.now()
    year = str(now.year)
    month = str(now.month)
    day = str(now.day)
    session = requests.session()
    response = session.get(
        "https://www.otpbank.hu/apps/exchangerate/api/exchangerate/offline/"
        + year + "-" + month + "-" + day)
    payload = json.loads(response.text)
    rates = payload['dates'][0]['versions'][0]['exchangeRates']
    for entry in rates:
        if entry['currencyCode'] == 'USD':
            USD = entry['middleRate']
        if entry['currencyCode'] == 'JPY':
            JPY = entry['middleRate']
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = JPY / 100
    foreignbank_info(conn, 'HUF', USD, JPY, now)
def israel_craw(conn):
    """Scrape ILS representative rates from the Bank of Israel."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://www.boi.org.il/en/Markets/ExchangeRates/Pages/Default.aspx")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select("table tr")]
    for row in rows[1:]:
        if row[2] == 'Japan':
            JPY = row[3]
        if row[2] == 'USA':
            USD = row[3]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'ILS', USD, JPY, now)
def russia_craw(conn):
    """Scrape RUB official daily rates from the Central Bank of Russia."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://www.cbr.ru/eng/currency_base/daily/")
    soup = BeautifulSoup(response.text, "html.parser")
    # Skip the header row of the rates table.
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".data tr")[1:]]
    for row in rows:
        if row[1] == 'JPY':
            JPY = row[4]
        if row[1] == 'USD':
            USD = row[4]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'RUB', USD, JPY, now)
def egypt_craw(conn):
    """Scrape EGP exchange rates from the Central Bank of Egypt."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://www.cbe.org.eg/en/EconomicResearch/Statistics/Pages/ExchangeRatesListing.aspx"
    )
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".table tbody tr")]
    for row in rows:
        # Match on the first word of the currency name column.
        first_word = row[0].split()[0]
        if first_word == 'Japanese':
            JPY = row[1]
        if first_word == 'US':
            USD = row[1]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'EGP', USD, JPY, now)
def sweden_craw(conn):
    """Scrape SEK exchange rates from SEB's rates page."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://www.seb.ee/eng/exchange-rates")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".crc-row")]
    for row in rows:
        code = row[0].replace(' ', '').replace('\n', '').split('-')[0]
        if code == 'JPY':
            # Decimal comma -> decimal point.
            JPY = row[4].replace(',', '.')
        if code == 'USD':
            USD = row[4].replace(',', '.')
    # Page quotes per 10 units of foreign currency; normalize and invert.
    JPY = 1 / (float(JPY) / 10)
    USD = 1 / (float(USD) / 10)
    foreignbank_info(conn, 'SEK', USD, JPY, now)
def bangladesh_craw(conn):
    """Scrape BDT exchange rates from Bangladesh Bank."""
    now = datetime.datetime.now()
    session = requests.session()
    # NOTE(review): verify=False disables TLS certificate checking — the
    # site's certificate chain presumably fails validation; confirm.
    response = session.get("https://www.bb.org.bd/econdata/exchangerate_dtl.php?loadmode=1&cboCurrency=&ddlYear=&UsersList=&SelectPeriod=,", verify=False)
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".standardcellborder tr")[2:]]
    for row in rows:
        if len(row) == 3:
            if row[0] == 'JPY':
                JPY = row[1]
            if row[0] == 'USD':
                USD = row[2]
    foreignbank_info(conn, 'BDT', USD, JPY, now)
def poland_craw(conn):
    """Scrape PLN exchange rates from the National Bank of Poland."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://www.nbp.pl/homen.aspx?f=/kursy/RatesA.html")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".pad5 tr")]
    for row in rows:
        if len(row) == 3:
            code = row[1].split(' ')[1]
            if code == 'JPY':
                JPY = row[2]
            if code == 'USD':
                USD = row[2]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'PLN', USD, JPY, now)
def denmark_craw(conn):
    """Scrape DKK exchange rates from Danmarks Nationalbank."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://www.nationalbanken.dk/en/statistics/exchange_rates/Pages/Default.aspx"
    )
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".table.table-condensed.table-bordered tbody tr")]
    for row in rows:
        if row[1] == 'JPY':
            JPY = row[2]
        if row[1] == 'USD':
            USD = row[2]
    # Both quotes are per 100 units of foreign currency; normalize to per 1.
    JPY = float(JPY) / 100
    USD = float(USD) / 100
    foreignbank_info(conn, 'DKK', USD, JPY, now)
def taiwan_craw(conn):
    """Scrape TWD sight rates for USD and JPY from the Bank of Taiwan.

    The original implementation retried the request in a ``while`` loop with
    a bare ``except: pass``, which (a) could spin forever if the site stayed
    unreachable, (b) swallowed KeyboardInterrupt/SystemExit, and (c) left
    ``response`` unbound on total failure. This version bounds the retries
    and re-raises the last error so failures are visible to the caller.
    """
    now = datetime.datetime.now()
    session = requests.session()
    max_attempts = 5
    for attempt in range(max_attempts):
        try:
            response = session.get("https://rate.bot.com.tw/xrt?Lang=en-US")
            break
        except Exception:
            # Transient network errors: retry; give up (and surface the
            # error) after the final attempt.
            if attempt == max_attempts - 1:
                raise
    soup = BeautifulSoup(response.text, "html.parser")
    for row in soup.select("tbody tr"):
        for cell in row.select(".hidden-phone.print_show"):
            # Currency code appears in parentheses, e.g. "US Dollar (USD)".
            code = re.search(r'\((.*?)\)', cell.text).group(1)
            if code == 'USD':
                USD = row.select(
                    ".rate-content-sight.text-right.print_hide")[0].text
            elif code == 'JPY':
                JPY = row.select(
                    ".rate-content-sight.text-right.print_hide")[0].text
    foreignbank_info(conn, 'TWD', USD, JPY, now)
def kuwait_craw(conn):
    """Scrape KWD exchange rates from the National Bank of Kuwait."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://www.nbk.com/kuwait/personal/investments/currency-rates.html")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select("table tr")]
    for row in rows[1:]:
        name = row[1].replace(' ', '')
        if name == 'USDOLLAR':
            USD = row[2]
        if name == 'JAPANESEYEN':
            JPY = row[2]
    foreignbank_info(conn, 'KWD', USD, JPY, now)
def brunei_craw(conn):
    """Scrape BND exchange rates from BIBD."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "http://www.bibd.com.bn/resource-centre/rates/foreign-exchange")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".tab_content tbody tr")]
    for row in rows:
        # Only rows like "Japanese Yen (JPY)" with four cells are rate rows.
        if len(row) == 4 and len(row[0].split('(')) == 2:
            code = row[0].split('(')[1].split(')')[0]
            if code == 'JPY':
                JPY = row[3]
            if code == 'USD':
                USD = row[3]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'BND', USD, JPY, now)
def indonesia_craw(conn):
    """Scrape IDR transaction rates from Bank Indonesia."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://www.bi.go.id/en/moneter/informasi-kurs/transaksi-bi/Default.aspx"
    )
    soup = BeautifulSoup(response.text, "html.parser")
    # Original slicing: take rows [1:26], then skip the first -> rows [2:26].
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".table1 tr")[2:26]]
    for row in rows:
        code = row[0].replace(' ', '')
        if code == 'JPY':
            JPY = row[3].replace(',', '')
        if code == 'USD':
            USD = row[3].replace(',', '')
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'IDR', USD, JPY, now)
def india_craw(conn):
    """Scrape INR exchange rates from the bank's rates page."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://mv.statebank/exchange-rate")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".table.table-bordered tr")]
    for row in rows[4:]:
        label = row[0].replace(' ', '').split('/')[0]
        if label == 'JapaneseYen':
            JPY = row[1].replace('\t', '').replace('\n', '')
        if label == 'USDollar':
            USD = row[1].replace('\t', '').replace('\n', '')
    # NOTE(review): asymmetric scaling — JPY is multiplied by 10 while USD is
    # divided by 1000 before inverting; presumably matches how the page
    # denominates each row (per-10 yen vs per-1000 rupees) — confirm.
    JPY = 1 / (float(JPY) * 10)
    USD = 1 / (float(USD) / 1000)
    foreignbank_info(conn, 'INR', USD, JPY, now)
def turkey_craw(conn):
    """Scrape TRY central-bank rates from turkishliratoday.com."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "http://www.turkishliratoday.com/central-bank-of-turkey-exchange-rates.php"
    )
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".table.table-striped.table-bordered.table-hover tr")[1:]]
    for row in rows:
        code = row[1].split('/')[0].split(' ')[1]
        if code == 'JPY':
            # Decimal comma -> decimal point.
            JPY = row[4].replace(',', '.')
        if code == 'USD':
            USD = row[4].replace(',', '.')
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'TRY', USD, JPY, now)
def czech_craw(conn):
    """Scrape CZK fixing rates from the Czech National Bank."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://www.cnb.cz/en/financial-markets/foreign-exchange-market/central-bank-exchange-rate-fixing/central-bank-exchange-rate-fixing/")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select(".currency-table tbody tr")]
    for row in rows:
        if row[3] == 'JPY':
            JPY = row[4]
        if row[3] == 'USD':
            USD = row[4]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'CZK', USD, JPY, now)
def brazil_craw(conn):
    """Scrape BRL PTAX rates from the Central Bank of Brazil."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://ptax.bcb.gov.br/ptax_internet/consultarTodasAsMoedas.do?method=consultaTodasMoedas")
    soup = BeautifulSoup(response.text, "html.parser")
    # Decimal comma -> decimal point applied to every cell up front.
    rows = [[cell.text.replace(',', '.') for cell in tr.select("td")]
            for tr in soup.select("table tr")[1:]]
    for row in rows:
        if len(row) > 4:
            if row[2] == 'USD':
                USD = row[4]
            elif row[2] == 'JPY':
                JPY = row[4]
    foreignbank_info(conn, 'BRL', USD, JPY, now)
def mxp_craw(conn):
    """Scrape MXN rates for USD and JPY from Banco de México (Banxico).

    Fix: the request URL contained ``§or=6`` — an HTML-entity mangling of
    ``&sector=6`` (``&sect`` renders as ``§``). The query parameter is
    restored so the intended cuadro is requested.
    """
    now = datetime.datetime.now()
    # Hard-coded browser headers the site appears to require.
    # NOTE(review): the Cookie holds a captured session id and Sec-Fetch-User
    # looks redacted ('******'; normally '?1') — these will go stale; confirm
    # whether the endpoint actually needs them.
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7',
        'Cache-Control': 'max-age=0',
        'Connection': 'keep-alive',
        'Cookie': 'JSESSIONID=c7422415da2ffde8c6b8c8543ce6; TS018df36d=0189f484afa17e09eb8407d12480d8cfad2efb1fe58f2001c6af7639d5d054d32692eccf027228513a001dd926e1bfecbc175419bf13ca6ca8f9f30fd31782423cab32272a706e4ac52d51610dd929918d0b34e81c; ser9108090=3343861418.39455.0000; ser25268080=642664106.36895.0000; _ga=GA1.3.1837497598.1604641720; _gid=GA1.3.1948073149.1604641720; TS0175f232=0189f484aff9ef6bcd5ab8240c477a50dd4241ca86ec82b9b6b39e170134521d400c868644f7c0ecb6db0da71aede0df40079e386296dc611dc3cb14999a6c611ab49a4efe0f77b115cefc576d2d44c8018b6fdadb',
        'Host': 'www.banxico.org.mx',
        'Sec-Fetch-Dest': 'document',
        'Sec-Fetch-Mode': 'navigate',
        'Sec-Fetch-Site': 'none',
        'Sec-Fetch-User': '******',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36'
    }
    req = requests.get(
        'https://www.banxico.org.mx/SieInternet/consultarDirectorioInternetAction.do?accion=consultarCuadroAnalitico&idCuadro=CA113&sector=6&locale=en',
        headers=headers)
    soup = BeautifulSoup(req.text, 'html.parser')
    table = soup.find_all("table")
    trs = table[1].find_all("tr")
    # Fixed row positions of the USD and JPY quotes in the analytic table.
    usd_td = trs[66].find_all("td")
    jpy_td = trs[36].find_all("td")
    # Strip surrounding whitespace/newlines embedded in the cell text.
    usd = usd_td[4].text.strip().replace("\n", "")
    jpy = jpy_td[4].text.strip().replace("\n", "")
    foreignbank_info(conn, 'MXN', usd, jpy, now)
def jordan_craw(conn):
    """Scrape JOD buy rates from Alawneh Exchange's embedded JSON."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://alawnehexchange.com/en/currency_exchange")
    # The rates live in a JSON blob embedded in the page; extract the JOD
    # object that sits between the two known markers.
    match = re.search('(?<=,"JOD":).+(?=},"nice_menus_options)', response.text)
    data = json.loads(match.group())
    USD = data['USD']['BUY']
    JPY = data['JPY']['BUY']
    foreignbank_info(conn, 'JOD', USD, JPY, now)
def aus_crawling(conn):
    """Fetch AUD cash buy rates for USD and JPY from Commonwealth Bank's JSON feed."""
    now = datetime.datetime.now()
    aus_bank = requests.get(
        "https://www.commbank.com.au/content/data/forex-rates/AUD.json?path=1604630399626"
    )
    payload = json.loads(aus_bank.text)
    # Fixed positions in the feed: index 0 is USD, index 16 is JPY.
    usd = payload.get('currencies')[0].get('bbfcash')
    jpy = payload.get('currencies')[16].get('bbfcash')
    # Feed quotes foreign currency per AUD; invert to AUD per unit.
    usd = 1 / float(usd)
    jpy = 1 / float(jpy)
    foreignbank_info(conn, 'AUD', usd, jpy, now)
def catarrh_craw(conn):
    """Fetch QAR buy rates for USD and JPY from QIB's currency-converter JSON."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://www.qib.com.qa/currency_convertor_page.php")
    for entry in json.loads(response.text):
        if entry['currency'] == 'USD':
            USD = entry['buyrate']
        elif entry['currency'] == 'JPY':
            JPY = entry['buyrate']
    foreignbank_info(conn, 'QAR', USD, JPY, now)
def eng_crawling(conn):
    """Fetch GBP buy rates for USD and JPY from HSBC's FX-widget feed."""
    now = datetime.datetime.now()
    eng_bank = requests.get(
        "https://www5.trkd-hs.com/hsbcfxwidget/data/getFXList?token=0vg8cORxRLBsrWg9C9UboMT%2BkN2Ykze6vFnRV1nA8DE%3D&_=1604567448186"
    )
    # The feed's top-level key is unquoted; quote it so json can parse it.
    payload = json.loads(eng_bank.text.replace('data', '"data"'))
    fx_list = payload.get('data').get('fxList')
    # Fixed positions in the list: index 0 is USD, index 7 is JPY.
    usd = fx_list[0].get('buy')
    jpy = fx_list[7].get('buy')
    # Feed quotes JPY per 10000 yen; convert to a per-1-yen rate.
    jpy = float(jpy) / 10000
    foreignbank_info(conn, 'GBP', usd, jpy, now)
def cad_crawling(conn):
    """Fetch CAD buy rates for USD and JPY from BMO's FX include page."""
    now = datetime.datetime.now()
    cad_bank = requests.get(
        "https://www.bmo.com/bmocda/templates/json_fx_include.jsp")
    # The rates are embedded as a JavaScript object literal; pull it out of
    # the `var FXLong = ...;` assignment and normalize quotes for json.
    matched = re.search(r'var FXLong = (.+?);', cad_bank.text, re.S)
    money_list = json.loads(matched.group(1).replace("'", '"'))
    usd = money_list.get("USD").get("NA").get("BUY")
    jpy = money_list.get("JPY").get("NA").get("BUY")
    foreignbank_info(conn, 'CAD', usd, jpy, now)
def php_crawling(conn):
    """Scrape PHP exchange rates from the Development Bank of the Philippines."""
    now = datetime.datetime.now()
    # The site rejects requests without a browser-like User-Agent.
    headers = {
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36"
    }
    phili_bank = requests.get("https://www.dbp.ph/foreign-exchange-rates/",
                              headers=headers)
    soup = BeautifulSoup(phili_bank.content, "html.parser")
    trs = soup.select(
        ".tbl.tbl--clean.tbl--res.tbl--size-2.tbl--75-t tbody tr")
    usd_list = []
    jpy_list = []
    for tr in trs:
        cells = tr.select("td")
        if cells[0].text == 'USD':
            usd_list.append(cells[1].text)
        if cells[0].text == 'JPY':
            jpy_list.append(cells[1].text)
    # Take the first match for each currency.
    usd = usd_list[0]
    jpy = jpy_list[0]
    foreignbank_info(conn, 'PHP', usd, jpy, now)
def bahrain_craw(conn):
    """Scrape BHD rates from BFC: USD from the main page, JPY from the Asia-Pacific page."""
    now = datetime.datetime.now()
    session = requests.session()

    # USD comes from the main currency-exchange page.
    response = session.get(
        "https://www.bfc.com.bh/personal/currency-exchange/")
    soup = BeautifulSoup(response.text, "html.parser")
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select("table tr")]
    for row in rows[1:]:
        if row[0].split('\n')[3] == 'USD':
            USD = row[1]

    # JPY is only listed on the Asia-Pacific continent page.
    response2 = session.get(
        "https://www.bfc.com.bh/personal/currency-exchange/continent/asia-pacific/#rates"
    )
    soup2 = BeautifulSoup(response2.text, "html.parser")
    rows2 = [[cell.text for cell in tr.select("td")]
             for tr in soup2.select("table tr")]
    for row in rows2[1:]:
        if row[0].split('\n')[3] == 'JPY':
            JPY = row[1]

    # Site quotes foreign currency per BHD; invert to BHD per unit.
    USD = 1 / float(USD)
    JPY = 1 / float(JPY)
    foreignbank_info(conn, 'BHD', USD, JPY, now)
def malaysia_craw(conn):
    """Scrape MYR exchange rates from Bank Negara Malaysia.

    BNM publishes rates in sessions (09:00, 11:30, 12:00, 17:00); the table
    id on the page is "Content" + the session time, so we pick the latest
    session that has already started.

    Fixes two bugs in the original time handling:
    * the session key was built as ``str(hour) + str(minute)``, so e.g.
      17:05 became 175 (not 1705) and selected the wrong session;
    * exact boundary times (09:00, 11:30, 12:00, 17:00) matched none of the
      strict comparisons, leaving ``myr_time`` unbound (NameError).
    """
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://www.bnm.gov.my/?tpl=exchangerates")
    soup = BeautifulSoup(response.text, "html.parser")
    # Encode the current time as HHMM, e.g. 17:05 -> 1705.
    hhmm = now.hour * 100 + now.minute
    if hhmm < 1130:
        # Before 11:30 (including before 09:00) use the 09:00 session,
        # matching the original's fallback for early times.
        myr_time = '0900'
    elif hhmm < 1200:
        myr_time = '1130'
    elif hhmm < 1700:
        myr_time = '1200'
    else:
        myr_time = '1700'
    rows = [[cell.text for cell in tr.select("td")]
            for tr in soup.select("#Content" + myr_time + " tr")[2:]]
    for row in rows:
        if row[1] == 'JPY':
            JPY = row[2].replace(' ', '')
        if row[1] == 'USD':
            USD = row[2].replace(' ', '')
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'MYR', USD, JPY, now)
def viet_crawling(conn):
    """Scrape VND exchange rates from the State Bank of Vietnam."""
    now = datetime.datetime.now()
    viet_bank = requests.get("https://www.sbv.gov.vn/TyGia/faces/ExchangeRate.jspx?_adf.ctrl-state=189twepxym_4&_afrLoop=6492512555058297")
    soup = BeautifulSoup(viet_bank.content, "html.parser")
    table = soup.find_all("table", {"class": "jrPage"})
    trs = table[1].find_all("tr")
    # Fixed row positions: row 3 is USD, row 5 is JPY; rate is in cell 4.
    usd_cells = trs[3].find_all("td")
    jpy_cells = trs[5].find_all("td")
    usd = usd_cells[4].text.replace(',', '')
    jpy = jpy_cells[4].text
    foreignbank_info(conn, 'VND', usd, jpy, now)
def norway_craw(conn):
    """Fetch NOK rates for USD and JPY from Norges Bank's SDMX JSON API."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get(
        "https://data.norges-bank.no/api/data/EXR/B..NOK.SP?format=sdmx-new-json&apisrc=nbi&lastNObservations=2&locale=en"
    )
    payload = json.loads(response.text)
    series = payload['data']['dataSets'][0]['series']
    # Fixed series keys in the SDMX response: 0:17 is JPY, 0:33 is USD;
    # observation '1' is the latest of the two requested.
    JPY = series['0:17:0:0']['observations']['1'][0]
    USD = series['0:33:0:0']['observations']['1'][0]
    # JPY is quoted per 100 yen; convert to a per-1-yen rate.
    JPY = float(JPY) / 100
    foreignbank_info(conn, 'NOK', USD, JPY, now)
def eu_craw(conn):
    """Scrape EUR exchange rates from Eurobank Cyprus."""
    now = datetime.datetime.now()
    session = requests.session()
    response = session.get("https://www.eurobank.com.cy/en-us/contact-support/useful-tools/curency-rates")
    soup = BeautifulSoup(response.text, "html.parser")
    for row in soup.select(".tabsrow.tmrow"):
        cells = row.find_all('p')
        if cells[0].text == 'USD':
            USD = float(cells[2].text)
        elif cells[0].text == 'JPY':
            JPY = float(cells[2].text)
    # The page quotes USD/JPY per 1 EUR; invert to get EUR per 1 USD / 1 JPY.
    USD = 1 / USD
    JPY = 1 / JPY
    foreignbank_info(conn, 'EUR', USD, JPY, now)