Exemplo n.º 1
0
def ino(site):
    """
    Live forex rates scraped from ino.com.

    Fetches USD-to-fiat pairs directly and inverts the metal quotes
    (quoted as XAG/XAU per USD) so every key has the form "USD:XXX".
    Writes the refined dict to <site>_forex.txt as JSON.
    """
    uri = "https://assets.ino.com/data/history/"

    try:
        data = {}
        # fiat pairs are quoted USD -> currency; use the value directly
        for symbol in ["CNY", "RUB", "EUR", "JPY", "KRW", "GBP"]:
            query = f"?s=FOREX_USD{symbol}&b=&f=json"
            url = uri + query
            # [-1][-2]: last candle, second-to-last field (the price)
            ret = requests.get(url, timeout=(15, 15)).json()[-1][-2]
            data["USD:" + symbol] = float(ret)
        # metals are quoted metal -> USD, so invert to keep USD as base
        for symbol in ["XAG", "XAU"]:
            query = f"?s=FOREX_{symbol}USDO&b=&f=json"
            url = uri + query
            ret = requests.get(url, timeout=(15, 15)).json()[-1][-2]
            data["USD:" + symbol] = 1 / float(ret)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except Exception:
        # narrowed from bare except: do not swallow SystemExit/KeyboardInterrupt
        print(f"{site} failed to load")
Exemplo n.º 2
0
def aastock(site):
    """
    Live forex rates scraped from aastocks.com (morningstar backdoor).

    Metal quotes arrive as XAU/XAG -> USD and are inverted; fiat quotes
    arrive keyed "USDXXX" and are rekeyed to "USD:XXX".  The refined
    dict is written to <site>_forex.txt as JSON.
    """

    uri = "http://www.aastocks.com/en/resources/datafeed/getrtcurconvert.ashx?curr="
    symbols = "USDCNY,USDEUR,USDGBP,USDKRW,USDJPY,USDXAU,USDXAG"
    url = uri + symbols

    try:
        # timeout added for consistency with the sibling scrapers;
        # previously a hung server could block this call forever
        raw = requests.get(url, timeout=(15, 15)).json()
        data = {}
        for item in raw:
            if item["to"] == "USD":
                # metal quotes are priced in USD; invert to USD base
                if item["from"] in ["XAU", "XAG"]:
                    data[item["to"] + ":" +
                         item["from"]] = 1 / float(item["price"])
            else:
                data[item["symbol"].replace("USD",
                                            "USD:")] = float(item["price"])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except Exception:
        # narrowed from bare except: do not swallow SystemExit/KeyboardInterrupt
        print(f"{site} failed to load")
Exemplo n.º 3
0
def forextime(site):
    """
    live forex rates scraped from forextime.com
    """
    def midpoint(quote):
        """
        mid price of ask/bid; invert pairs quoted as XXXUSD
        """
        mid = (float(quote["ask"]) + float(quote["bid"])) / 2
        return 1 / mid if quote["name"][-3:] == "USD" else mid

    url = ("https://www.forextime.com/informers/rates/symbols?symbols=" +
           "EURUSD,GBPUSD,USDRUB,USDJPY,XAUUSD,XAGUSD")
    try:
        raw = requests.get(url, timeout=(15, 15)).json()
        # rekey every pair to "USD:XXX" with its mid price
        data = {
            "USD:" + pair.replace("USD", ""): midpoint(quote)
            for pair, quote in raw.items()
        }
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 4
0
def forexrates(site):
    """
    live forex rates scraped from forexrates.net
    """
    def parse(raw):
        """
        strip widget markup, return "PAIR=price" strings
        """
        chunks = raw.split("Values")[1:]
        cleaned = []
        for chunk in chunks:
            entry = chunk.split(";")[0]
            for junk in ('"', "[", "]", " "):
                entry = entry.replace(junk, "")
            cleaned.append(entry)
        return cleaned

    url = ("https://www.forexrates.net/widget/FR-FRW-2.php?" +
           "c1=USD/EUR&c2=USD/GBP&c3=USD/RUB&c4=USD/JPY&c5=USD/CNY")
    try:
        raw = requests.get(url, timeout=20).text
        data = {}
        for entry in parse(raw):
            fields = entry.split("=")
            data[fields[0].replace("USD", "USD:")] = float(fields[1])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 5
0
def barchart(site):
    """
    Live forex rates via the barchart getQuote JSON API.

    https://ondemand.websol.barchart.com/getQuote.json?apikey=key&symbols=AAPL%2CGOOG
    limit 400 per day.  Fiat pairs (^USDXXX) are rekeyed to "USD:XXX";
    the metals (^XAGUSD/^XAUUSD) keep their raw key in the loop and are
    then inverted so USD is the base currency.
    """
    key = config_apikeys()[site]
    url = "https://marketdata.websol.barchart.com/getQuote.json"
    symbols = "^USDEUR,^USDJPY,^USDGBP,^USDCNY,^USDKRW,^USDRUB,^XAGUSD,^XAUUSD"
    params = {"symbols": symbols, "apikey": key}
    try:
        ret = requests.get(url, params=params,
                           timeout=(15, 15)).json()["results"]
        data = {}
        for item in ret:
            try:
                data[item["symbol"].replace("^USD",
                                            "USD:")] = float(item["lastPrice"])
            except (KeyError, TypeError, ValueError):
                # skip malformed quotes only; narrowed from a bare except
                # that silently hid every possible error
                pass
        # metals do not match the ^USD prefix above; invert to USD base
        data["USD:XAG"] = 1 / data.pop("^XAGUSD")
        data["USD:XAU"] = 1 / data.pop("^XAUUSD")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except Exception:
        print(f"{site} failed to load")
Exemplo n.º 6
0
def wocu(site):
    """
    live forex rates scraped from wocu.com
    XAU XAG are not precise
    """
    def extract(html, code):
        """
        pull the quoted float for one currency out of the html matrix
        """
        cell = html.split(code)[1].split("</td>")[2]
        return float(cell.split(">")[1])

    url = "http://54.154.247.217/wocutab.php"
    try:
        page = requests.get(url, timeout=(15, 15)).text
        data = {}
        for code in ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY"]:
            try:
                data["USD:" + code] = extract(page, code)
            except:
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 7
0
def fscapi(site):
    """
    https://fcsapi.com/document/forex-api
    https://fcsapi.com/api/forex/latest?symbol=USD/JPY&access_key=key
    limit 10 per minute
    """
    api_key = config_apikeys()[site]
    url = "https://fcsapi.com/api/forex/latest"
    pairs = "USD/EUR,USD/JPY,USD/GBP,USD/CNY,USD/KRW,USD/RUB,XAU/USD,XAG/USD"
    try:
        response = requests.get(
            url,
            params={"symbol": pairs, "access_key": api_key},
            timeout=(15, 15),
        ).json()["response"]
        data = {}
        for quote in response:
            try:
                pair = quote["symbol"].replace("/", ":")
                data[pair] = float(quote["price"].replace(",", ""))
            except:
                pass
        # metals arrive quoted per USD; flip them to USD base
        data["USD:XAG"] = 1 / data.pop("XAG:USD")
        data["USD:XAU"] = 1 / data.pop("XAU:USD")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 8
0
def duckduckgo(site):
    """
    live forex rates scraped from XE via duckduckgo.com

    NOTE(review): the final topConversions loop reads ``ret`` left over
    from the last iteration of the currency loop, so it depends on that
    loop having run at least once and on the order of ``currencies``.
    """
    uri = "https://duckduckgo.com/js/spice/currency/1/usd/"
    try:
        data = {}
        currencies = ["XAU", "XAG", "KRW", "JPY", "RUB"]
        # AUD,CAD,COP,EUR,GBP,INR,MXN,MYR,ZAR contained in topConversions by default
        for currency in currencies:
            if currency in ["XAU", "XAG"]:
                # metals are requested inverted: <metal>/usd
                url = uri.replace("usd/", "") + currency + "/usd"
            else:
                url = uri + currency
            raw = requests.get(url, timeout=(15, 15)).text
            # strip the JSONP wrapper "ddg_spice_currency(...);" to bare JSON
            raw = (raw.replace("\n",
                               "").replace(" ",
                                           "").replace("ddg_spice_currency(",
                                                       "").replace(");", ""))
            ret = json_loads(raw)
            if currency in ["XAU", "XAG"]:
                # invert metal quotes so USD is the base currency
                data["USD:" + currency] = 1 / float(
                    ret["conversion"]["converted-amount"])
            else:
                data["USD:" + currency] = float(
                    ret["conversion"]["converted-amount"])
            time.sleep(1)  # throttle to avoid rate limiting
        # harvest the extra pairs bundled with the last response
        for item in ret["topConversions"]:
            data["USD:" + item["to-currency-symbol"]] = float(
                item["converted-amount"])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 9
0
def fixerio(site):
    """
    http://data.fixer.io/api/latest?access_key=key&base=USD&symbols=AUD,CAD
    limit 1000 per month (hourly updates)

    NOTE: XAU and XAG are NOT ACCURATE (SUPPORT TICKET OPEN)
    """
    key = config_apikeys()[site]
    url = "http://data.fixer.io/api/latest"
    params = {"symbols": "USD,RUB,GBP,CNY,KRW,JPY", "access_key": key}
    try:
        rates = requests.get(url, params=params,
                             timeout=(15, 15)).json()["rates"]
        # the feed is EUR based; rebase everything on USD via EURUSD
        usdeur = 1 / float(rates["USD"])
        data = {"USD:EUR": usdeur}
        for code in ("RUB", "GBP", "CNY", "KRW", "JPY"):
            data["USD:" + code] = float(rates[code]) * usdeur
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 10
0
def oanda(site):
    """
    make external request, decode, decrypt, reformat to dict

    Fetches the oanda rates blob (hex encoded, rc4 encrypted), decrypts
    it with the hardcoded key, and averages each line's two quoted
    values into a single price keyed "XXX:YYY".
    """
    key = "aaf6cb4f0ced8a211c2728328597268509ade33040233a11af"
    url = "https://www1.oanda.com/lfr/rates_lrrr?tstamp="

    def hex_decode(raw):
        """
        latin-1 from hexidecimal
        """
        # pad odd-length hex with a leading zero before decoding
        return bytes.fromhex("0" + raw if len(raw) %
                             2 else raw).decode("latin-1")

    def rc4(cypher, key):
        """
        decryption of rc4 stream cypher from latin-1

        NOTE(review): parameter names are swapped relative to textbook
        RC4 — the call site passes the secret as ``cypher`` and the
        payload as ``key``.  The arithmetic matches the site's encoding;
        do not "fix" the naming without re-verifying output.
        """
        idx1 = 0
        output = []
        r256 = [*range(256)]
        # key-scheduling: permute the 0..255 state table with the secret
        for idx2 in range(256):
            idx1 = (idx1 + r256[idx2] + ord(cypher[idx2 % len(cypher)])) % 256
            r256[idx2], r256[idx1] = r256[idx1], r256[idx2]
        idx1, idx2 = 0, 0
        # keystream generation: xor each payload byte with the stream
        for _, item in enumerate(key):
            idx2 = (idx2 + 1) % 256
            idx1 = (idx1 + r256[idx2]) % 256
            r256[idx2], r256[idx1] = r256[idx1], r256[idx2]
            output.append(
                chr(ord(item) ^ r256[(r256[idx2] + r256[idx1]) % 256]))
        return ("").join(output)

    try:
        # retry until one fetch+decode+decrypt round trip succeeds
        while True:
            try:
                # millisecond timestamp acts as a cache buster
                millies = str(int(round(time.time() * 1000)))
                raw = requests.get(url + millies, timeout=(15, 15)).text
                hex_decoded = hex_decode(raw)
                decrypted = rc4(key, hex_decoded)
                break
            except:
                time.sleep(5)
        content = decrypted.split("\n")
        # each line is "PAIR=<v1>=<v2>"; store the midpoint of the two values
        parsed = {
            raw.split("=")[0]:
            (float(raw.split("=")[1]) + float(raw.split("=")[2])) / 2
            for raw in content
        }
        data = {}
        for pair, price in parsed.items():
            data[pair.replace("/", ":")] = float(price)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def bitcoinaverage(site):
    """
    live forex rates scraped from bitcoinaverage.com
    """
    url = "https://apiv2.bitcoinaverage.com/frontend/constants/exchangerates/local"
    try:
        scraper = cfscrape.create_scraper(sess=requests.Session())
        rates = scraper.get(url, timeout=(15, 15)).json()["rates"]
        data = {
            "USD:" + code: float(info["rate"])
            for code, info in rates.items()
        }
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 12
0
def freeforex(site):
    """
    live forex rates scraped from freeforexapi.com
    """
    url = ("https://www.freeforexapi.com/api/live?pairs=" +
           "EURUSD,GBPUSD,USDJPY")
    try:
        rates = requests.get(url, timeout=(15, 15)).json()["rates"]
        data = {}
        for pair, quote in rates.items():
            # "EURUSD" -> "EUR:USD"
            data[f"{pair[:3]}:{pair[-3:]}"] = float(quote["rate"])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 13
0
def reuters(site):
    """
    live forex rates scraped from reuters.com (refinitiv)
    """
    url = ("https://www.reuters.com/companies/api/getFetchQuotes/" +
           "USDCNY,USDJPY,USDKRW,USDRUB,USDGBP,USDEUR")
    try:
        payload = requests.get(url, timeout=(15, 15)).json()
        data = {}
        for quote in payload["market_data"]["currencypairs"]:
            # store the bid/ask midpoint keyed "USD:XXX"
            mid = (float(quote["bid"]) + float(quote["ask"])) / 2
            data[quote["symbol"].replace("USD", "USD:")] = mid
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 14
0
def wsj(site):
    """
    live forex rates scraped from wsj.com

    Queries the marketwatch convert endpoint once per currency and
    parses the plain-text float response.
    """
    uri = "https://api.wsj.net/api/deltoro-mw/marketwatchsite/quote/currency/convert"
    try:
        currencies = ["EUR", "CNY", "RUB", "KRW", "JPY"]
        data = {}
        for currency in currencies:
            # NOTE(review): "to" is "{currency}USD" (e.g. "EURUSD"), which
            # reads oddly for a convert endpoint — confirm against the live
            # API before changing
            endpoint = f"?from=USD&to={currency}USD&amount=1.00"
            url = uri + endpoint
            raw = requests.get(url, timeout=(15, 15)).text
            data["USD:" + currency] = float(raw)
            time.sleep(1)  # throttle to avoid rate limiting
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def investing(site):
    """
    live forex rates scraped from investing.com
    https://www.investing.com/webmaster-tools/live-currency-cross-rates

    Parses the widget html by splitting on exact markup fragments
    ('target="_blank"', "askpid", 'last">', ...), so any change to the
    widget layout breaks the extraction.
    """
    url = ("https://www.widgets.investing.com/live-currency-cross-rates?" +
           "theme=darkTheme&cols=last&pairs=3,2111,2124,2126,650,962711,69,68")
    headers = {
        "href":
        ("https://www.investing.com?utm_source=WMT&amp;utm_medium=referral&amp;"
         + "utm_campaign=LIVE_CURRENCY_X_RATES&amp;utm_content=Footer%20Link"),
        "target":
        "_blank",
        "rel":
        "nofollow",
    }
    try:
        # cfscrape session (cloudflare bypass) wrapping a plain session
        session = requests.Session()
        session.headers = headers
        cfscrape_requests = cfscrape.create_scraper(sess=session)
        ret = cfscrape_requests.get(url, headers=headers,
                                    timeout=(15, 15)).text
        # isolate one anchor-delimited row per pair, keeping only price rows
        lines = ret.split('target="_blank"')
        lines = [i.replace(" ", "").replace(",", "") for i in lines]
        lines = [i for i in lines if "askpid" in i]
        lines = [i.split("hiddenFour")[0] for i in lines]
        data = {}
        for item in lines:
            # key: pair name from the anchor text; value: raw "last" cell html
            data[item.split("</a>")[0].replace(">",
                                               "")] = item.split('last">')[1]
        data = {
            k.replace("/", ":"): v.split("</div>")[0]
            for k, v in data.items()
        }
        data = {k: float(v) for k, v in data.items()}
        # metals are quoted per USD; invert so USD is the base
        data["USD:XAG"] = 1 / data.pop("XAG:USD")
        data["USD:XAU"] = 1 / data.pop("XAU:USD")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 16
0
def exchangeratewidget(site):
    """
    live forex rates scraped from exchangeratewidget.com
    """
    pairs = ["USDEUR", "USDGBP", "USDJPY", "USDCNY", "USDRUB", "USDKRW"]
    # the widget takes a comma-terminated list of pairs
    url = ("https://www.exchangeratewidget.com/converter.php?v=11&t=" +
           ",".join(pairs) + ",")
    try:
        raw = requests.get(url, timeout=20).text
        data = {}
        for pair in pairs:
            code = pair.replace("USD", "")
            quote = raw.split(code)[1].split("</span>")[1].split(">")[1]
            data["USD:" + code] = float(quote)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 17
0
def fxmarket(site):
    """
    https://fixer.io/documentation
    https://fxmarketapi.com/apilive?api_key=key&currency=EURUSD,GBPUSD
    1000 / month
    """
    api_key = config_apikeys()[site]
    url = "https://fxmarketapi.com/apilive"
    currency = "USDEUR,USDGBP,USDCNY,USDKRW,USDRUB,USDJPY,XAUUSD,XAGUSD"
    try:
        prices = requests.get(
            url,
            params={"currency": currency, "api_key": api_key},
            timeout=(15, 15),
        ).json()["price"]
        data = {}
        for pair, quote in prices.items():
            data[pair.replace("USD", "USD:")] = float(quote)
        # metals map to "XAGUSD:"/"XAUUSD:" above; invert them to USD base
        data["USD:XAG"] = 1 / data.pop("XAGUSD:")
        data["USD:XAU"] = 1 / data.pop("XAUUSD:")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 18
0
def fxrate(site):
    """
    live forex rates scraped from fx-rate.net
    """
    def fiat_price(html):
        """
        extract a USD -> fiat quote from the conversion page
        """
        chunk = html.split('title="American Dollar to')[1]
        value = chunk.split("per")[0].split(";")[-1].replace(" ", "")
        return float(value)

    def metal_price(html):
        """
        extract a metal -> USD quote and invert it to USD base
        """
        chunk = html.split('to American Dollar Rates')[1]
        value = chunk.split("per")[0].split(";")[-1].replace(" ", "")
        return 1 / float(value)

    try:
        data = {}
        base = "https://fx-rate.net/conversion.php?currency=USD&currency_pair="
        for code in ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY"]:
            page = requests.get(base + code, timeout=(15, 15)).text
            data["USD:" + code] = fiat_price(page)
        for code in ["XAG", "XAU"]:
            url = ("https://fx-rate.net/conversion.php?" +
                   f"currency={code}&currency_pair=USD")
            page = requests.get(url, timeout=(15, 15)).text
            data["USD:" + code] = metal_price(page)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 19
0
def openexchangerates(site):
    """
    Hourly forex rates from openexchangerates.org.

    https://docs.openexchangerates.org/
    https://openexchangerates.org/api/latest.json?app_id=key
    limit 1000 per month (hourly updates)
    """
    key = config_apikeys()[site]
    url = "https://openexchangerates.org/api/latest.json"
    params = {"app_id": key}
    symbols = ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY", "XAU", "XAG"]
    try:
        ret = requests.get(url, params=params,
                           timeout=(15, 15)).json()["rates"]
        data = {}
        # loop variable renamed from "key" so it no longer shadows the
        # api key above
        for code, val in ret.items():
            if code in symbols:
                data["USD:" + code] = float(val)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except Exception:
        # narrowed from bare except: do not swallow SystemExit/KeyboardInterrupt
        print(f"{site} failed to load")
Exemplo n.º 20
0
def liveusd(site):
    """
    live forex rates scraped from liveusd.com
    """
    url = "http://liveusd.com/veri/refresh/total.php"
    try:
        text = requests.get(url, timeout=(15, 15)).text
        data = {}
        # response is newline-separated "USDXXX:price" entries
        for line in text.replace(" ", "").split("\n"):
            if not line:
                continue
            try:
                fields = line.split(":")
                data[fields[0].replace("USD", "USD:")] = float(fields[1])
            except:
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 21
0
def currencyconverter(site):
    """
    Live forex rates from the free.currconv.com API.

    https://free.currconv.com/api/v7/convert?q=USD_PHP&compact=ultra&apikey=key
    100/hour two pairs per request
    """
    key = config_apikeys()[site]
    url = "https://free.currconv.com/api/v7/convert"
    symbols = ["USD_EUR,USD_GBP", "USD_CNY,USD_KRW", "USD_JPY,USD_RUB"]
    try:
        data = {}
        for symbol in symbols:
            try:
                params = {"compact": "y", "apikey": key, "q": symbol}
                ret = requests.get(url, params=params, timeout=(15, 15)).json()
                # BUG FIX: this loop variable used to be named "key", which
                # clobbered the api key after the first batch and sent a
                # bogus apikey on the remaining requests
                for pair, val in ret.items():
                    data[pair.replace("_", ":")] = float(val["val"])
            except Exception:
                # best-effort per batch: a failed batch is skipped
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except Exception:
        print(f"{site} failed to load")
Exemplo n.º 22
0
def fxempire2(site):
    """
    Live forex rates scraped from fxempire (tradingview backdoor).

    ?symbol=USD-RUB&resolution=1&from=158016000&to=1580162240"
    """

    uri = "https://tradingview.fxempire.com/api/history"
    symbols = [
        "USD-CNY", "USD-RUB", "USD-EUR", "USD-GBP", "USD-KRW", "USD-JPY"
    ]
    try:
        data = {}
        for symbol in symbols:
            now = int(time.time())
            then = int((now - 200) / 10)  # note weird /10 here
            params = f"?symbol={symbol}&resolution=1&from={then}&to={now}"
            url = uri + params
            # timeout added for consistency with the sibling scrapers;
            # previously a hung request blocked forever.  "c" holds the
            # close series; take the latest candle
            raw = requests.get(url, timeout=(15, 15)).json()["c"][-1]
            data[symbol.replace("-", ":")] = float(raw)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except Exception:
        # narrowed from bare except: do not swallow SystemExit/KeyboardInterrupt
        print(f"{site} failed to load")
Exemplo n.º 23
0
def currencyme(site):
    """
    live forex rates scraped from currency.me.uk
    """
    url = "https://www.currency.me.uk/remote/ER-CCCS2-AJAX.php"
    try:
        data = {}
        for code in ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY"]:
            try:
                query = {"ConvertTo": code, "ConvertFrom": "USD", "amount": 1}
                text = requests.get(url, params=query, timeout=(15, 15)).text
                data["USD:" + code] = float(text.replace(" ", ""))
            except:
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 24
0
def finviz(site):
    """
    live forex rates scraped from finviz.com
    """
    url = "https://finviz.com/api/forex_all.ashx?timeframe=m5"
    # output pair -> finviz ticker (metals use the SI/GC futures tickers)
    tickers = {
        "AUD:USD": "AUDUSD",
        "EUR:GBP": "EURGBP",
        "EUR:USD": "EURUSD",
        "GBP:JPY": "GBPJPY",
        "GBP:USD": "GBPUSD",
        "USD:CAD": "USDCAD",
        "NZD:USD": "NZDUSD",
        "USD:CHF": "USDCHF",
        "USD:JPY": "USDJPY",
        "XAG:USD": "SI",
        "XAU:USD": "GC",
    }
    try:
        ret = requests.get(url, timeout=(15, 15)).json()
        data = {
            pair: float(ret[ticker]["last"])
            for pair, ticker in tickers.items()
        }
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 25
0
def yahoo(site):
    """
    live forex rates scraped from yahoo finance
    XAU and XAG are stale
    CG=F is a gold 30 day future
    SI=F is silver 60 day future
    """
    uri = "https://query1.finance.yahoo.com/v7/finance/spark?symbols=USD"
    try:
        data = {}
        for code in ["EUR", "CNY", "RUB", "KRW", "JPY"]:
            url = uri + f"{code}%3DX&range=1m&interval=1m"
            payload = requests.get(url, timeout=(15, 15)).json()
            meta = payload["spark"]["result"][0]["response"][0]["meta"]
            data["USD:" + code] = float(meta["regularMarketPrice"])
            time.sleep(1)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def fxempire1(site):
    """
    live forex rates scraped from fxempire.com (backdoor to xignite)

    Pulls the full markets list, keeps the forex section, inverts the
    XAUUSD/XAGUSD commodities to USD base, then drops every RUB pair
    because that feed is stale.
    """
    url = "https://www.fxempire.com/api/v1/en/markets/list"
    # hard-coded browser-like request headers sent with the API call
    headers = {
        "authority":
        "www.fxempire.com",
        "method":
        "GET",
        "path":
        "/api/v1/en/markets/list",
        "scheme":
        "https",
        "accept":
        ("text/html,application/xhtml+xml,application/xml;q=0.9,image/webp," +
         "image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"),
        "accept-encoding":
        "gzip, deflate, br",
        "accept-language":
        "en-US,en;q=0.9",
        "cache-control":
        "max-age=0",
        "dnt":
        "1",
        "sec-fetch-mode":
        "navigate",
        "sec-fetch-site":
        "none",
        "sec-fetch-user":
        "******",
        "upgrade-insecure-requests":
        "1",
        "user-agent":
        ("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36" +
         " (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36 OPR/66.0.3515.27"
         ),
    }
    try:
        # cfscrape scraper (cloudflare bypass) wrapping a plain session
        session = requests.Session()
        session.headers = headers
        cfscrape_requests = cfscrape.create_scraper(sess=session)
        ret = cfscrape_requests.get(url, timeout=(15, 15)).json()
        data = {}
        for item in ret["forex"]:
            if item:
                try:
                    pair = item["name"].replace("/", ":")
                    price = item["value"]
                    data[pair] = float(price)
                except:
                    # skip malformed entries
                    pass
        for item in ret["commodities"]:
            try:
                if item["symbol"] in ["XAUUSD", "XAGUSD"]:
                    # invert metal quotes so USD is the base currency
                    pair = "USD:" + item["symbol"].replace("USD", "")
                    price = 1 / float(item["value"])
                    data[pair] = price
            except:
                pass
        data = {k: v
                for k, v in data.items() if "RUB" not in k}  # RUBLE is stale
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def fxcm(site):
    """
    live forex rates scraped from fxcm.com

    The endpoint returns a JSONP-style payload that is massaged into
    valid JSON by string replacement before parsing; only 6-letter
    upper-case symbols are kept, priced at the bid/ask midpoint.
    """
    # millisecond timestamp (minus 1s) used as the feed's cache buster
    timestamp = int(time.time() * 1000) - 1000
    url = f"https://ratesjson.fxcm.com/DataDisplayer?t={timestamp}"
    # hard-coded browser-like request headers sent with the API call
    headers = {
        "authority":
        "www.fxcm.com",
        "method":
        "GET",
        "path":
        "/api/v1/en/markets/list",
        "scheme":
        "https",
        "accept":
        ("text/html,application/xhtml+xml,application/xml;q=0.9,image/webp," +
         "image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"),
        "accept-encoding":
        "gzip, deflate, br",
        "accept-language":
        "en-US,en;q=0.9",
        "cache-control":
        "max-age=0",
        "dnt":
        "1",
        "sec-fetch-mode":
        "navigate",
        "sec-fetch-site":
        "none",
        "sec-fetch-user":
        "******",
        "upgrade-insecure-requests":
        "1",
        "user-agent":
        ("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, " +
         "like Gecko) Chrome/79.0.3945.79 Safari/537.36 OPR/66.0.3515.27"),
    }
    try:
        # fails during some hours of day
        session = requests.Session()
        session.headers = headers
        cfscrape_requests = cfscrape.create_scraper(sess=session)
        ret = cfscrape_requests.get(url, timeout=(15, 15)).text
        # strip the JSONP wrapper and trailing commas to get parseable JSON
        data = (ret.replace(" ",
                            "").replace('null({"Rates":',
                                        "").replace(",}]});",
                                                    "}]").replace(",}", "}"))
        # each entry looks like:
        # {"Symbol":"CHFJPY","Bid":"1.1","Ask":"1.2","Spread":"0.1","ProductType":"1",}
        raw = json_loads(data)
        data = {}
        for item in raw:
            symbol = item["Symbol"]
            # keep plain 6-letter currency pairs only (e.g. "EURUSD")
            if symbol.isupper() and (len(symbol) == 6):
                symbol = symbol[:3] + ":" + symbol[-3:]
                data[symbol] = (float(item["Ask"]) + float(item["Bid"])) / 2
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def bloomberg(site):
    """
    live forex rates scraped from bloomberg.com

    Uses the bulk time-series endpoint with hard-coded browser headers
    (including a captured session cookie, which may expire); fiat pairs
    are rekeyed "XXX:YYY" and the metals inverted to USD base.
    """
    uri = "https://www.bloomberg.com/markets/api/bulk-time-series/price/"
    endpoint = "USDCNY%3ACUR,USDRUB%3ACUR,USDJPY%3ACUR,USDEUR%3ACUR,USDKRW%3ACUR,XAUUSD%3ACUR,XAGUSD%3ACUR"
    url = uri + endpoint
    headers = {
        "authority":
        "www.bloomberg.com",
        "method":
        "GET",
        "path":
        ("/markets/api/comparison/data?securities=" +
         "USDCNY%3ACUR,USDRUB%3ACUR,USDJPY%3ACUR,USDEUR%3ACUR,USDKRW%3ACUR,XAUUSD%3ACUR,XAGUSD%3ACUR"
         + "&securityType=CURRENCY&locale=en"),
        "scheme":
        "https",
        "accept":
        ("text/html,application/xhtml+xml,application/xml;q=0.9,image/" +
         "webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"),
        "accept-encoding":
        "gzip, deflate, br",
        "accept-language":
        "en-US,en;q=0.9",
        "cache-control":
        "max-age=0",
        # NOTE(review): captured session cookie baked into the source;
        # requests will start failing once it expires
        "cookie":
        ("bbAbVisits=1; _pxhd=e24b47c64d37711c147cfb3c4b35c845563d2f9831b" +
         "03d9189f8cd761bc2be4f:d78eeb01-34c9-11ea-8f86-51d2aad9afb3; _px" +
         "vid=d78eeb01-34c9-11ea-8f86-51d2aad9afb3; _reg-csrf=s%3Ab0pWvbcs" +
         "UtrjYeJ0T2GrTaaD.8kaQlvHchJ1D%2FZZMaQWQiTizJTxrqqyzzuEZHEvlQNw;" +
         " agent_id=7989385a-d6d9-4446-b7aa-3c937407862b;" +
         " session_id=5702901e-d5fe-41e7-b259-df46322015e0;" +
         " session_key=3179869387f4c4ec4385e0d16222f0e59f48c47f;" +
         " _user-status=anonymous; _is-ip-whitelisted=false;" +
         " _user-ip=91.132.137.116; trc_cookie_storage=taboola%2520global%253"
         + "Auser-id%3D2f4acdc6-7c3c-412c-8766-d9c80dcffc38-tuct513df3e;" +
         " bdfpc=004.0586371899.1578785723722;" +
         " _reg-csrf-token=4ZxUa9q8-fkNXQkoHHXhnobWne1sDlIVcKEQ"),
        "dnt":
        "1",
        "if-none-match":
        'W/"lZU52eQYxjadyNKGCyftEg=="',
        "sec-fetch-mode":
        "navigate",
        "sec-fetch-site":
        "none",
        "sec-fetch-user":
        "******",
        "upgrade-insecure-requests":
        "1",
        "user-agent":
        ("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36" +
         " (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36 OPR/66.0.3515.27"
         ),
    }
    try:
        # cfscrape scraper (cloudflare bypass) wrapping a plain session
        session = requests.Session()
        session.headers = headers
        cfscrape_requests = cfscrape.create_scraper(sess=session)
        ret = cfscrape_requests.get(url, timeout=(15, 15)).json()
        data = {}
        for item in ret:
            # "USDCNY:CUR" -> "USD:CNY"
            symbol = item["id"].replace(":CUR", "")
            symbol = symbol[:3] + ":" + symbol[-3:]
            data[symbol] = float(item["lastPrice"])
        # metals are quoted per USD; invert so USD is the base
        data["USD:XAG"] = 1 / data.pop("XAG:USD")
        data["USD:XAU"] = 1 / data.pop("XAU:USD")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")