Exemplo n.º 1
0
def forexrates(site):
    """
    live forex rates scraped from forexrates.net
    """
    def parse(raw):
        """
        strip widget markup; return list of "SYMBOL=price" strings
        """
        chunks = raw.split("Values")[1:]
        cleaned = []
        for chunk in chunks:
            line = chunk.split(";")[0]
            for junk in ('"', "[", "]", " "):
                line = line.replace(junk, "")
            cleaned.append(line)
        return cleaned

    url = (
        "https://www.forexrates.net/widget/FR-FRW-2.php?"
        + "c1=USD/EUR&c2=USD/GBP&c3=USD/RUB&c4=USD/JPY&c5=USD/CNY"
    )
    try:
        raw = requests.get(url, timeout=20).text
        data = {}
        for rate in parse(raw):
            parts = rate.split("=")
            # "USDEUR" -> "USD:EUR"
            data[parts[0].replace("USD", "USD:")] = float(parts[1])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 2
0
def aastock(site):
    """
    live forex rates scraped from aastock.com (morningstar backdoor)

    :param site: label used for console logging and for the
        "<site>_forex.txt" text-pipe file this worker writes
    """

    # NOTE(review): no timeout on the request below — a hung connection
    # relies on the parent process terminating this worker
    uri = "http://www.aastocks.com/en/resources/datafeed/getrtcurconvert.ashx?curr="
    symbols = "USDCNY,USDEUR,USDGBP,USDKRW,USDJPY,USDXAU,USDXAG"
    url = uri + symbols

    try:
        raw = requests.get(url).json()
        data = {}
        for item in raw:
            # metals quoted toward USD are inverted into USD:XAU / USD:XAG;
            # NOTE(review): the `else` pairs with the outer `to == "USD"`
            # check, so any non-metal item quoted to USD is skipped —
            # presumably intentional for this symbol list; confirm
            if item["to"] == "USD":
                if item["from"] in ["XAU", "XAG"]:
                    data[item["to"] + ":" +
                         item["from"]] = 1 / float(item["price"])
            else:
                data[item["symbol"].replace("USD",
                                            "USD:")] = float(item["price"])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 3
0
def ino(site):
    """
    live forex rates scraped from ino.com
    """
    uri = "https://assets.ino.com/data/history/"

    try:
        data = {}
        # USD-base pairs come through directly
        for symbol in ["CNY", "RUB", "EUR", "JPY", "KRW", "GBP"]:
            url = f"{uri}?s=FOREX_USD{symbol}&b=&f=json"
            last = requests.get(url, timeout=(15, 15)).json()[-1][-2]
            data["USD:" + symbol] = float(last)
        # precious metals are quoted the other way around; invert them
        for symbol in ["XAG", "XAU"]:
            url = f"{uri}?s=FOREX_{symbol}USDO&b=&f=json"
            last = requests.get(url, timeout=(15, 15)).json()[-1][-2]
            data["USD:" + symbol] = 1 / float(last)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 4
0
def fixerio(site):
    """
    http://data.fixer.io/api/latest?access_key=key&base=USD&symbols=AUD,CAD
    limit 1000 per month (hourly updates)

    NOTE: XAU and XAG are NOT ACCURATE (SUPPORT TICKET OPEN)
    """
    key = config_apikeys()[site]
    url = "http://data.fixer.io/api/latest"
    params = {"symbols": "USD,RUB,GBP,CNY,KRW,JPY", "access_key": key}
    try:
        rates = requests.get(url, params=params,
                             timeout=(15, 15)).json()["rates"]
        # the free tier is EUR based; rebase each quote to USD
        usdeur = 1 / float(rates["USD"])
        data = {"USD:EUR": usdeur}
        for currency in ["RUB", "GBP", "CNY", "KRW", "JPY"]:
            data["USD:" + currency] = float(rates[currency]) * usdeur
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 5
0
def pricefeed_forex():
    """
    build the forex price feed, persist it to disk, then return it
    """
    rates = aggregate_rates()
    race_write(doc="pricefeed_forex.txt", text=json_dumps(rates))
    return rates
Exemplo n.º 6
0
def forextime(site):
    """
    live forex rates scraped from forextime.com
    """
    def parse(values):
        """
        midpoint of bid/ask; pairs quoted toward USD are inverted
        """
        midpoint = (float(values["ask"]) + float(values["bid"])) / 2
        return 1 / midpoint if values["name"][-3:] == "USD" else midpoint

    url = ("https://www.forextime.com/informers/rates/symbols?symbols="
           "EURUSD,GBPUSD,USDRUB,USDJPY,XAUUSD,XAGUSD")
    try:
        ret = requests.get(url, timeout=(15, 15)).json()
        data = {}
        for pair, values in ret.items():
            # "EURUSD" -> "USD:EUR", "USDJPY" -> "USD:JPY"
            data["USD:" + pair.replace("USD", "")] = parse(values)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 7
0
def barchart(site):
    """
    https://ondemand.websol.barchart.com/getQuote.json?apikey=key&symbols=AAPL%2CGOOG
    limit 400 per day
    """
    key = config_apikeys()[site]
    url = "https://marketdata.websol.barchart.com/getQuote.json"
    params = {
        "symbols":
        "^USDEUR,^USDJPY,^USDGBP,^USDCNY,^USDKRW,^USDRUB,^XAGUSD,^XAUUSD",
        "apikey": key,
    }
    try:
        results = requests.get(url, params=params,
                               timeout=(15, 15)).json()["results"]
        data = {}
        for item in results:
            try:
                # "^USDEUR" -> "USD:EUR"; metal tickers are left untouched
                data[item["symbol"].replace("^USD", "USD:")] = float(
                    item["lastPrice"])
            except:
                pass
        # metals arrive as ^XAGUSD / ^XAUUSD; invert into USD-base pairs
        data["USD:XAG"] = 1 / data.pop("^XAGUSD")
        data["USD:XAU"] = 1 / data.pop("^XAUUSD")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 8
0
def duckduckgo(site):
    """
    live forex rates scraped from XE via duckduckgo.com

    :param site: label used for console logging and for the
        "<site>_forex.txt" text-pipe file this worker writes
    """
    uri = "https://duckduckgo.com/js/spice/currency/1/usd/"
    try:
        data = {}
        currencies = ["XAU", "XAG", "KRW", "JPY", "RUB"]
        # AUD,CAD,COP,EUR,GBP,INR,MXN,MYR,ZAR contained in topConversions by default
        for currency in currencies:
            # metals must be requested inverted (metal -> usd)
            if currency in ["XAU", "XAG"]:
                url = uri.replace("usd/", "") + currency + "/usd"
            else:
                url = uri + currency
            raw = requests.get(url, timeout=(15, 15)).text
            # response is JSONP; strip whitespace and the
            # ddg_spice_currency( ... ); wrapper to leave bare JSON
            raw = (raw.replace("\n",
                               "").replace(" ",
                                           "").replace("ddg_spice_currency(",
                                                       "").replace(");", ""))
            ret = json_loads(raw)
            # flip metal quotes back to USD-base
            if currency in ["XAU", "XAG"]:
                data["USD:" + currency] = 1 / float(
                    ret["conversion"]["converted-amount"])
            else:
                data["USD:" + currency] = float(
                    ret["conversion"]["converted-amount"])
            # be polite; spread the requests out
            time.sleep(1)
        # `ret` intentionally leaks from the loop above: the final
        # response's topConversions block supplies the common pairs
        for item in ret["topConversions"]:
            data["USD:" + item["to-currency-symbol"]] = float(
                item["converted-amount"])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 9
0
def wocu(site):
    """
    live forex rates scraped from wocu.com
    XAU XAG are not precise
    """
    def parse(raw, symbol):
        """
        attempt to extract a float value from the html matrix
        """
        cell = raw.split(symbol)[1].split("</td>")[2]
        return float(cell.split(">")[1])

    url = "http://54.154.247.217/wocutab.php"
    try:
        raw = requests.get(url, timeout=(15, 15)).text
        data = {}
        # best effort per symbol; skip any cell that fails to parse
        for symbol in ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY"]:
            try:
                data["USD:" + symbol] = parse(raw, symbol)
            except:
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def request(api, signal):
    """
    GET remote procedure call to public exchange API

    :param api: dict describing the call; reads "exchange", "endpoint",
        "params", "pair", "nonce"; mutated in place with request metadata
    :param signal: shared value set to 1 once the response is written
    """
    urls = return_urls()
    api["method"] = "GET"
    api["headers"] = {}
    api["data"] = ""
    api["key"] = ""
    api["passphrase"] = ""
    api["secret"] = ""
    api["url"] = urls[api["exchange"]]
    url = api["url"] + api["endpoint"]
    # stagger concurrent workers to avoid hammering the endpoint
    time.sleep(10 * random())
    resp = requests.request(
        method=api["method"],
        url=url,
        data=api["data"],
        params=api["params"],
        headers=api["headers"],
    )
    try:
        data = resp.json()
    except Exception:
        # BUGFIX: `data` was previously left unbound when the body was not
        # valid JSON, raising NameError below; record the raw text instead
        print(resp.text)
        data = {"error": resp.text}
    doc = (api["exchange"] + api["pair"] + str(int(10**6 * api["nonce"])) +
           "_{}_public.txt".format(api["exchange"]))
    race_write(doc, json_dumps(data))
    signal.value = 1
Exemplo n.º 11
0
def fscapi(site):
    """
    https://fcsapi.com/document/forex-api
    https://fcsapi.com/api/forex/latest?symbol=USD/JPY&access_key=key
    limit 10 per minute
    """
    key = config_apikeys()[site]
    url = "https://fcsapi.com/api/forex/latest"
    symbols = "USD/EUR,USD/JPY,USD/GBP,USD/CNY,USD/KRW,USD/RUB,XAU/USD,XAG/USD"
    params = {"symbol": symbols, "access_key": key}
    try:
        response = requests.get(url, params=params,
                                timeout=(15, 15)).json()["response"]
        data = {}
        for item in response:
            try:
                pair = item["symbol"].replace("/", ":")
                # prices may carry thousands separators
                data[pair] = float(item["price"].replace(",", ""))
            except:
                pass
        # metals are quoted metal->USD; invert into USD-base pairs
        data["USD:XAG"] = 1 / data.pop("XAG:USD")
        data["USD:XAU"] = 1 / data.pop("XAU:USD")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 12
0
def refresh_forex_rates():
    """
    make process wrapped external calls; IPC via text pipe
    """
    methods = [
        aastock,  # DARKWEB API; MORNINGSTAR (GOOGLE FINANCE) BACKDOOR
        barchart,  # KEYED
        bitcoinaverage,  # MAY ADD CAPTCHA; HEADER REQUIRED; CLOUDFARE SPOOFING
        bloomberg,  # MAY ADD CAPTCHA; HEADER REQUIRED; CLOUDFARE SPOOFING
        currencyconverter,  # KEYED, NOT COMPATIBLE WITH VPN / DYNAMIC IP
        currencyme,  # DARKWEB API
        duckduckgo,  # XML SCRAPING, XE BACKDOOR
        exchangeratewidget,  # XML SCRAPING
        finviz,  # DARKWEB API
        fixerio,  # KEYED
        forexrates,  # XML SCRAPING
        forextime,  # DARKWEB API
        freeforex,  # FREE API
        fscapi,  # KEYED
        fxcm,  # CLOUDFARE SPOOFING; HEADER REQUIRED; ALMOST JSON RESPONSE
        fxempire1,  # XIGNITE BACKDOOR; HEADER REQUIRED; CLOUDFARE SPOOFING
        fxempire2,  # TRADINGVIEW BACKDOOR
        fxmarket,  # KEYED
        fxrate,  # XML SCRAPING
        ino,  # DARKWEB API
        investing,  # CLOUDFARE SPOOFING, XML SCRAPING
        liveusd,  # DARKWEB API
        oanda,  # DARKWEB API; RC4 ECRYPTION OF LATIN ENCODING
        openexchangerates,  # KEYED
        reuters,  # REFINITIV BACKDOOR, DARKWEB API
        wocu,  # XML SCRAPING
        wsj,  # MARKETWATCH BACKDOOR, DARKWEB API
        yahoo,  # YAHOO FINANCE V7 DARKWEB API
    ]
    # spawn one non-daemon process per scraper; each clears then owns
    # its dedicated text pipe file
    processes = {}
    for method in methods:
        name = method.__name__
        race_write(f"{name}_forex.txt", {})
        worker = Process(target=method, args=(name, ))
        worker.daemon = False
        worker.start()
        processes[name] = worker
        # FIXME: FOR DEPLOYMENT ON LOW COST WEB HOSTING SERVICES
        # FIXME: ALTERNATIVE RAM SAVINGS 0.5GB, WITH ADDED EXECUTION TIME OF 5 MINUTES
        # FIXME: **INCLUDE** NEXT 3 LINES FOR LOW RAM ALTERNATIVE
        # worker.join(TIMEOUT)
        # worker.terminate()
        # time.sleep(5)
    # FIXME: **EXCLUDE** NEXT 4 LINES FOR LOW RAM ALTERNATIVE
    for worker in processes.values():
        worker.join(TIMEOUT)
    for worker in processes.values():
        worker.terminate()
    # harvest whatever each worker managed to write to its text pipe
    return {
        name: race_read_json(f"{name}_forex.txt")
        for name in processes
    }
Exemplo n.º 13
0
def oanda(site):
    """
    make external request, decode, decrypt, reformat to dict

    oanda serves its rate blob hex-encoded and RC4-encrypted; fetch it,
    decode from hex to latin-1, decrypt, then average each pair's two
    quoted values (presumably bid/ask — confirm) into one price
    """
    # static RC4 key embedded in oanda's public rate widget
    key = "aaf6cb4f0ced8a211c2728328597268509ade33040233a11af"
    url = "https://www1.oanda.com/lfr/rates_lrrr?tstamp="

    def hex_decode(raw):
        """
        latin-1 from hexidecimal
        """
        # left-pad to an even number of hex digits before decoding
        return bytes.fromhex("0" + raw if len(raw) %
                             2 else raw).decode("latin-1")

    def rc4(cypher, key):
        """
        decryption of rc4 stream cypher from latin-1

        NOTE(review): parameter roles look swapped relative to convention —
        `cypher` drives the key schedule and `key` is the text that gets
        XORed; the call site below passes (key, hex_decoded) accordingly
        """
        idx1 = 0
        output = []
        r256 = [*range(256)]
        # key-scheduling algorithm (KSA)
        for idx2 in range(256):
            idx1 = (idx1 + r256[idx2] + ord(cypher[idx2 % len(cypher)])) % 256
            r256[idx2], r256[idx1] = r256[idx1], r256[idx2]
        idx1, idx2 = 0, 0
        # pseudo-random generation: one XORed output char per input char
        for _, item in enumerate(key):
            idx2 = (idx2 + 1) % 256
            idx1 = (idx1 + r256[idx2]) % 256
            r256[idx2], r256[idx1] = r256[idx1], r256[idx2]
            output.append(
                chr(ord(item) ^ r256[(r256[idx2] + r256[idx1]) % 256]))
        return ("").join(output)

    try:
        # retry until a full fetch+decode+decrypt round trip succeeds
        while True:
            try:
                millies = str(int(round(time.time() * 1000)))
                raw = requests.get(url + millies, timeout=(15, 15)).text
                hex_decoded = hex_decode(raw)
                decrypted = rc4(key, hex_decoded)
                break
            except:
                time.sleep(5)
        # plaintext is one "PAIR=price=price" record per line
        content = decrypted.split("\n")
        parsed = {
            raw.split("=")[0]:
            (float(raw.split("=")[1]) + float(raw.split("=")[2])) / 2
            for raw in content
        }
        data = {}
        for pair, price in parsed.items():
            data[pair.replace("/", ":")] = float(price)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def bitcoinaverage(site):
    """
    live forex rates scraped from bitcoinaverage.com
    """
    url = "https://apiv2.bitcoinaverage.com/frontend/constants/exchangerates/local"
    try:
        # cloudflare-protected endpoint; wrap a fresh session with cfscrape
        scraper = cfscrape.create_scraper(sess=requests.Session())
        rates = scraper.get(url, timeout=(15, 15)).json()["rates"]
        data = {
            "USD:" + symbol: float(item["rate"])
            for symbol, item in rates.items()
        }
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
def pricefeed_cex():
    """
    create a cex price feed, write it to disk, and return it
    """
    usd_venues = [
        "bittrex",
        "bitfinex",
        "coinbase",
        "kraken",
        "bitstamp",
    ]
    btc_venues = [
        "bittrex",
        "binance",
        "poloniex",
        "huobi",
        "hitbtc",
    ]
    all_venues = [
        "bittrex",
        "binance",
        "poloniex",
        "huobi",
        "hitbtc",
        "bitfinex",
        "coinbase",
        "kraken",
        "bitstamp",
    ]
    # pairs are fetched in this order, each against its venue list
    plan = [
        ("BTC:USD", usd_venues),
        ("BTS:BTC", btc_venues),
        # ("LTC:BTC", all_venues),
        ("XRP:BTC", all_venues),
        ("ETH:BTC", all_venues),
    ]
    api = {}
    cex = {}
    for pair, exchanges in plan:
        api["pair"] = pair
        cex[pair] = fetch(exchanges, api)

    race_write("pricefeed_cex.txt", cex)
    return cex
Exemplo n.º 16
0
def freeforex(site):
    """
    live forex rates scraped from freeforexapi.com
    """
    url = ("https://www.freeforexapi.com/api/live?pairs="
           "EURUSD,GBPUSD,USDJPY")
    try:
        rates = requests.get(url, timeout=(15, 15)).json()["rates"]
        data = {}
        for pair, quote in rates.items():
            # six-letter pair -> "AAA:BBB"
            data[pair[:3] + ":" + pair[-3:]] = float(quote["rate"])
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 17
0
def reuters(site):
    """
    live forex rates scraped from reuters.com (refinitiv)
    """
    url = ("https://www.reuters.com/companies/api/getFetchQuotes/"
           "USDCNY,USDJPY,USDKRW,USDRUB,USDGBP,USDEUR")
    try:
        raw = requests.get(url, timeout=(15, 15)).json()
        pairs = raw["market_data"]["currencypairs"]
        data = {}
        for item in pairs:
            # midpoint of bid/ask; "USDEUR" -> "USD:EUR"
            midpoint = (float(item["bid"]) + float(item["ask"])) / 2
            data[item["symbol"].replace("USD", "USD:")] = midpoint
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 18
0
def wsj(site):
    """
    live forex rates scraped from wsj.com
    """
    uri = "https://api.wsj.net/api/deltoro-mw/marketwatchsite/quote/currency/convert"
    try:
        data = {}
        for currency in ["EUR", "CNY", "RUB", "KRW", "JPY"]:
            url = uri + f"?from=USD&to={currency}USD&amount=1.00"
            quote = requests.get(url, timeout=(15, 15)).text
            data["USD:" + currency] = float(quote)
            # be polite; spread the requests out
            time.sleep(1)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 19
0
def refresh_forex_rates():
    """
    make process wrapped external calls; IPC via text pipe
    """
    methods = [
        aastock,  # DARKWEB API; MORNINGSTAR (GOOGLE FINANCE) BACKDOOR
        bloomberg,  # MAY ADD CAPTCHA; HEADER REQUIRED; CLOUDFARE SPOOFING
        currencyme,  # DARKWEB API
        duckduckgo,  # XML SCRAPING, XE BACKDOOR
        exchangeratewidget,  # XML SCRAPING
        forexrates,  # XML SCRAPING
        forextime,  # DARKWEB API
        freeforex,  # FREE API
        fxcm,  # CLOUDFARE SPOOFING; HEADER REQUIRED; ALMOST JSON RESPONSE
        fxempire1,  # XIGNITE BACKDOOR; HEADER REQUIRED; CLOUDFARE SPOOFING
        fxempire2,  # TRADINGVIEW BACKDOOR
        fxrate,  # XML SCRAPING
        ino,  # DARKWEB API
        investing,  # CLOUDFARE SPOOFING, XML SCRAPING
        liveusd,  # DARKWEB API
        oanda,  # DARKWEB API; RC4 ECRYPTION OF LATIN ENCODING
        reuters,  # REFINITIV BACKDOOR, DARKWEB API
        wocu,  # XML SCRAPING
        wsj,  # MARKETWATCH BACKDOOR, DARKWEB API
        yahoo,  # YAHOO FINANCE V7 DARKWEB API
    ]
    # launch every scraper as its own non-daemon process; each clears
    # then owns its dedicated text pipe file
    processes = {}
    for method in methods:
        site = method.__name__
        race_write(f"{site}_forex.txt", {})
        worker = Process(target=method, args=(site, ))
        worker.daemon = False
        worker.start()
        processes[site] = worker
    # give each worker up to TIMEOUT seconds, then force-stop stragglers
    for worker in processes.values():
        worker.join(TIMEOUT)
    for worker in processes.values():
        worker.terminate()
    # harvest whatever each worker managed to write to its text pipe
    sources = {}
    for site in processes:
        sources[site] = race_read_json(f"{site}_forex.txt")
    return sources
def investing(site):
    """
    live forex rates scraped from investing.com
    https://www.investing.com/webmaster-tools/live-currency-cross-rates

    :param site: label used for console logging and for the
        "<site>_forex.txt" text-pipe file this worker writes
    """
    # widget endpoint; `pairs` are investing.com's internal pair ids
    url = ("https://www.widgets.investing.com/live-currency-cross-rates?" +
           "theme=darkTheme&cols=last&pairs=3,2111,2124,2126,650,962711,69,68")
    headers = {
        "href":
        ("https://www.investing.com?utm_source=WMT&amp;utm_medium=referral&amp;"
         + "utm_campaign=LIVE_CURRENCY_X_RATES&amp;utm_content=Footer%20Link"),
        "target":
        "_blank",
        "rel":
        "nofollow",
    }
    try:
        # cloudflare-protected page; wrap the session with cfscrape
        session = requests.Session()
        session.headers = headers
        cfscrape_requests = cfscrape.create_scraper(sess=session)
        ret = cfscrape_requests.get(url, headers=headers,
                                    timeout=(15, 15)).text
        # each quote row follows a target="_blank" anchor; keep only rows
        # carrying an askpid marker and trim trailing markup
        lines = ret.split('target="_blank"')
        lines = [i.replace(" ", "").replace(",", "") for i in lines]
        lines = [i for i in lines if "askpid" in i]
        lines = [i.split("hiddenFour")[0] for i in lines]
        data = {}
        # pair name sits before </a>; raw price follows last">
        for item in lines:
            data[item.split("</a>")[0].replace(">",
                                               "")] = item.split('last">')[1]
        data = {
            k.replace("/", ":"): v.split("</div>")[0]
            for k, v in data.items()
        }
        data = {k: float(v) for k, v in data.items()}
        # metals are quoted metal->USD; invert into USD-base pairs
        data["USD:XAG"] = 1 / data.pop("XAG:USD")
        data["USD:XAU"] = 1 / data.pop("XAU:USD")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 21
0
def exchangeratewidget(site):
    """
    live forex rates scraped from exchangeratewidget.com
    """
    symbols = ["USDEUR", "USDGBP", "USDJPY", "USDCNY", "USDRUB", "USDKRW"]
    # the widget accepts a comma-terminated pair list
    url = ("https://www.exchangeratewidget.com/converter.php?v=11&t=" +
           "".join(symbol + "," for symbol in symbols))
    try:
        data = {}
        raw = requests.get(url, timeout=20).text
        for symbol in symbols:
            currency = symbol.replace("USD", "")
            quote = raw.split(currency)[1].split("</span>")[1].split(">")[1]
            data["USD:" + currency] = float(quote)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 22
0
def fxmarket(site):
    """
    https://fixer.io/documentation
    https://fxmarketapi.com/apilive?api_key=key&currency=EURUSD,GBPUSD
    1000 / month
    """
    key = config_apikeys()[site]
    url = "https://fxmarketapi.com/apilive"
    symbols = "USDEUR,USDGBP,USDCNY,USDKRW,USDRUB,USDJPY,XAUUSD,XAGUSD"
    try:
        prices = requests.get(url,
                              params={"currency": symbols, "api_key": key},
                              timeout=(15, 15)).json()["price"]
        data = {}
        for pair, quote in prices.items():
            # "USDEUR" -> "USD:EUR"; metals become "XAUUSD:" / "XAGUSD:"
            data[pair.replace("USD", "USD:")] = float(quote)
        # invert the metal quotes into USD-base pairs
        data["USD:XAG"] = 1 / data.pop("XAGUSD:")
        data["USD:XAU"] = 1 / data.pop("XAUUSD:")
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 23
0
def fxrate(site):
    """
    live forex rates scraped from fx-rate.net
    """
    def parse(raw):
        """
        extract a USD-base quote from the conversion page markup
        """
        segment = raw.split('title="American Dollar to')[1]
        value = segment.split("per")[0].split(";")[-1].replace(" ", "")
        return float(value)

    def parse_metal(raw):
        """
        extract a metal quote from the markup and invert to USD-base
        """
        segment = raw.split('to American Dollar Rates')[1]
        value = segment.split("per")[0].split(";")[-1].replace(" ", "")
        return 1 / float(value)

    try:
        data = {}
        base = "https://fx-rate.net/conversion.php?currency=USD&currency_pair="
        for symbol in ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY"]:
            raw = requests.get(base + symbol, timeout=(15, 15)).text
            data["USD:" + symbol] = parse(raw)
        # metals are requested the other way around and inverted
        for symbol in ["XAG", "XAU"]:
            url = ("https://fx-rate.net/conversion.php?"
                   f"currency={symbol}&currency_pair=USD")
            raw = requests.get(url, timeout=(15, 15)).text
            data["USD:" + symbol] = parse_metal(raw)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 24
0
def main():
    """
    primary event loop
    """
    def banner(message=None):
        """
        clear the terminal, redraw the logo, optionally print a status
        """
        print("\033c")
        print_logo()
        if message is not None:
            print(it("cyan", message))

    banner("    presents:  Gateway Sceletus")
    gateway, do_sceletus, name, wif = user_input()
    while True:
        banner()
        if do_sceletus:
            print(it("cyan", "Cancelling ALL Open Orders..."))
            cancel_all_markets(name, wif)
        banner("Gathering CEX Data...")
        cex = cex_rates(gateway)
        banner("Gathering DEX Data...")
        dex = dex_rates(gateway)
        banner("Gathering FOREX Data...")
        forex = forex_rates()
        banner()
        print(it("cyan", "\n\nCEX"))
        print(cex)
        print(it("cyan", "\n\nDEX"))
        print(dex)
        print(it("cyan", "\n\nFOREX"))
        print(forex["medians"])
        race_write("pricefeed_cex.txt", cex)
        race_write("pricefeed_forex.txt", forex)
        prices = create_prices(cex, dex, forex)
        time.sleep(10)
        sceletus(prices, gateway, name, wif, do_sceletus)
        time.sleep(REFRESH - 100)
Exemplo n.º 25
0
def liveusd(site):
    """
    live forex rates scraped from liveusd.com
    """
    url = "http://liveusd.com/veri/refresh/total.php"
    try:
        raw = requests.get(url, timeout=(15, 15)).text
        data = {}
        # one "USDXXX:price" entry per line; skip blanks and junk rows
        for line in raw.replace(" ", "").split("\n"):
            if not line:
                continue
            try:
                pair = line.split(":")[0].replace("USD", "USD:")
                data[pair] = float(line.split(":")[1])
            except:
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 26
0
def openexchangerates(site):
    """
    https://docs.openexchangerates.org/
    https://openexchangerates.org/api/latest.json?app_id=key
    limit 1000 per month (hourly updates)
    """
    app_id = config_apikeys()[site]
    url = "https://openexchangerates.org/api/latest.json"
    wanted = ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY", "XAU", "XAG"]
    try:
        rates = requests.get(url, params={"app_id": app_id},
                             timeout=(15, 15)).json()["rates"]
        data = {
            "USD:" + currency: float(rate)
            for currency, rate in rates.items() if currency in wanted
        }
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 27
0
def currencyconverter(site):
    """
    https://free.currconv.com/api/v7/convert?q=USD_PHP&compact=ultra&apikey=key
    100/hour two pairs per request

    :param site: label used for console logging, the api key lookup, and
        the "<site>_forex.txt" text-pipe file this worker writes
    """
    key = config_apikeys()[site]
    url = "https://free.currconv.com/api/v7/convert"
    symbols = ["USD_EUR,USD_GBP", "USD_CNY,USD_KRW", "USD_JPY,USD_RUB"]
    try:
        data = {}
        for symbol in symbols:
            try:
                params = {"compact": "y", "apikey": key, "q": symbol}
                ret = requests.get(url, params=params, timeout=(15, 15)).json()
                # BUGFIX: this loop previously rebound the name `key`,
                # clobbering the api key and breaking every request after
                # the first; iterate with a distinct name instead
                for pair, val in ret.items():
                    data[pair.replace("_", ":")] = float(val["val"])
            except:
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 28
0
def finviz(site):
    """
    live forex rates scraped from finviz.com
    """
    url = "https://finviz.com/api/forex_all.ashx?timeframe=m5"
    # output pair -> finviz ticker (SI/GC are the silver/gold tickers)
    pairs = {
        "AUD:USD": "AUDUSD",
        "EUR:GBP": "EURGBP",
        "EUR:USD": "EURUSD",
        "GBP:JPY": "GBPJPY",
        "GBP:USD": "GBPUSD",
        "USD:CAD": "USDCAD",
        "NZD:USD": "NZDUSD",
        "USD:CHF": "USDCHF",
        "USD:JPY": "USDJPY",
        "XAG:USD": "SI",
        "XAU:USD": "GC",
    }
    try:
        ret = requests.get(url, timeout=(15, 15)).json()
        data = {
            pair: float(ret[ticker]["last"])
            for pair, ticker in pairs.items()
        }
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 29
0
def currencyme(site):
    """
    live forex rates scraped from currency.me.uk
    """
    url = "https://www.currency.me.uk/remote/ER-CCCS2-AJAX.php"
    try:
        data = {}
        # one conversion request per currency; best effort on each
        for currency in ["EUR", "RUB", "GBP", "KRW", "CNY", "JPY"]:
            try:
                quote = requests.get(
                    url,
                    params={
                        "ConvertTo": currency,
                        "ConvertFrom": "USD",
                        "amount": 1
                    },
                    timeout=(15, 15),
                ).text
                data["USD:" + currency] = float(quote.replace(" ", ""))
            except:
                pass
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")
Exemplo n.º 30
0
def fxempire2(site):
    """
    live forex rates scraped from fxempire (tradingview backdoor)
    ?symbol=USD-RUB&resolution=1&from=158016000&to=1580162240"

    :param site: label used for console logging and for the
        "<site>_forex.txt" text-pipe file this worker writes
    """

    uri = "https://tradingview.fxempire.com/api/history"
    symbols = [
        "USD-CNY", "USD-RUB", "USD-EUR", "USD-GBP", "USD-KRW", "USD-JPY"
    ]
    try:
        data = {}
        for symbol in symbols:
            now = int(time.time())
            then = int((now - 200) / 10)  # note weird /10 here
            params = f"?symbol={symbol}&resolution=1&from={then}&to={now}"
            url = uri + params
            # FIX: added a timeout (matching every other scraper in this
            # file) so a hung connection cannot stall this worker; take the
            # latest close from the candle history
            raw = requests.get(url, timeout=(15, 15)).json()["c"][-1]
            data[symbol.replace("-", ":")] = float(raw)
        data = refine_data(data)
        print(site, data)
        race_write(f"{site}_forex.txt", json_dumps(data))
    except:
        print(f"{site} failed to load")