def eos_transfer(order):
    """
    Serialize, sign, and broadcast an order dictionary with nine keys.

    Rotates through the public eosio api nodes until one reports the
    transaction as processed, then returns that node's response.
    """
    # FIXME log this event
    timestamp()
    line_number()
    # never print the private key
    redacted = {k: v for k, v in order.items() if k != "private"}
    print("\nORDER\n\n", redacted, "\n")
    nodes = eosio_nodes()
    while True:
        # rotate the node list; the new head is this attempt's endpoint
        nodes.append(nodes.pop(0))
        node = nodes[0]
        # configure the url and port
        eosio_config.url = node
        eosio_config.port = ""
        print("\nADDRESS\n\n", node, "\n")
        # assemble the transfer operation dictionary
        operation = {
            "from": order["public"],
            "memo": "",
            # eos must have 4 decimal places formatted as string with space and "EOS"
            "quantity": precisely(order["quantity"], 4) + " EOS",
            "to": order["to"],
        }
        print("\nOPERATION\n\n", operation, "\n")
        # serialize the transfer operation
        raw_tx = RawinputParams(
            "transfer",  # the operation type
            operation,  # the parameters
            "eosio.token",  # the contract; for our purposes always "eosio.token"
            order["public"] + "@active",  # the permitted party (or @owner)
        )
        print("\nSERIALIZE\n\n", raw_tx.params_actions_list, "\n")
        # sign the transfer operation
        signed = EosioParams(raw_tx.params_actions_list, order["private"])
        print("\nSIGN\n\n", signed.trx_json, "\n")
        # broadcast the transfer to the network
        try:
            ret = NodeNetwork.push_transaction(signed.trx_json)
            print("\nBROADCAST\n\n", ret)
            if "processed" not in ret:
                raise ValueError("NOT PROCESSED")
        except Exception as error:
            print(error)
            print(it("red", "BROADCAST FAILED"), node, "attempting new api...")
            continue
        print(it("red", "EOS TRANSFERRED"))
        return ret
def print_results(forex):
    """Pretty print the forex price data to terminal."""
    print("\nForeign Exchange Rates\n")
    print("markets")
    print(ret_markets(), "\n")
    # one colorized section per forex data source
    for source, data in forex["sources"].items():
        print(it("yellow", source))
        pprint(data)
    print("\n", it("purple", "aggregates"))
    pprint(forex["aggregate"])
    print("\n", it("blue", "medians"))
    # three columns per median row, padded for alignment
    for pair, stats in forex["medians"].items():
        print(pair, str(stats[0]).ljust(13), str(stats[1]).ljust(3), stats[2])
def print_options(options):
    """
    Print a two column table of Operation ID options.

    :param dict(options): static bitshares operation ids
    """
    print(it("yellow", "\n\n Operation ID Numbers\n"))
    rows = []
    # left column is ids 0-29; right column is ids 30+ when present
    for idx in range(30):
        row = " " + str(idx).ljust(4) + str(options[idx]).ljust(30)
        try:
            row += str(idx + 30).ljust(4) + str(options[idx + 30])
        except Exception:
            pass
        rows.append(row)
    print(it("yellow", "\n".join(rows) + "\n"))
    print(it("green", "\n\n Enter ID number(s)"))
def wwc():
    """ Winnowing Websocket Connections... """
    # NOTE: the docstring above is also printed verbatim at the end of the banner
    print("\033c")  # ANSI "reset": clears the terminal
    # cache = logo(cache)
    print(
        it(
            "cyan",
            """
        +===============================+
          ╦ ╦ ╔═╗ ╔╗╔ ╔═╗ ╔═╗ ╔╦╗
          ╠═╣ ║ ║ ║║║ ║╣  ╚═╗  ║
          ╩ ╩ ╚═╝ ╝╚╝ ╚═╝ ╚═╝  ╩
           MARKET - PEGGED - ASSETS
        +===============================+

        The right of nullification is a natural right,
        which all admit to be a remedy against insupportable oppression.

            $$$ James Madison $$$

        If it had not been for the justice of our cause,
        and the consequent interposition of Providence,
        in which we had faith, we must have been ruined.

            $$$ Ben Franklin $$$

        Resistance and disobedience in economic activity
        is the most moral human action possible.

            $$$ Samuel E. Konkin III $$$
        """,
        ))
    print("")
    print(ctime(), "\n")
    print(wwc.__doc__, "\n")
def print_op(op):
    """
    At the end of the main while loop we'll perform some action on every operation

    as a sample, we'll just color some operations and print the op

    :param list(op): op[0] is transaction type number and op[1] is the transaction
    :return None:
    """
    # color by op type: transfer / limit order create / limit order cancel;
    # any other operation id is printed by neither branch (silently skipped)
    palette = {0: "purple", 1: "green", 2: "red"}
    if op[0] in palette:
        print(it(palette[op[0]], op[1]), "\n")
def user_input():
    """Initialize script with user inputs; returns (gateway, do_sceletus, name, wif)."""
    print("\nChoose RUDEX, XBTSX, or DEEX\n")
    gateway = input("enter gateway: ").upper()
    prompt = "\n to SCELETUS" + it("cyan", " y + Enter ") + "or just Enter for Demo\n\n "
    do_sceletus = input(prompt).lower()
    name, wif = "", ""
    if do_sceletus == "y":
        # live run: collect signing credentials; wif entry is hidden via getpass
        name = input("\n Bitshares" + it("yellow", " AGENT NAME:\n\n "))
        wif = getpass("\n Bitshares" + it("yellow", " AGENT WIF:\n "))
        print(" *****************")
    return gateway, do_sceletus, name, wif
def main():
    """
    Primary event circuit: cancel every open order for an account,
    in every market, after interactive confirmation.
    """
    nodes = race_read_json(doc="nodes.txt")
    print("\033c")
    print("\n\nCANCEL ALL OPEN ORDERS IN ALL DEX MARKETS\n\n")
    account_name = input("\n Bitshares" + it("yellow", " AGENT NAME:\n\n "))
    rpc = reconnect(None)
    account_id = rpc_account_id(rpc, account_name)
    print("\n\n", account_name, account_id, "\n\n")
    # deduplicate and sort the open order ids
    orders = sorted(set(rpc_open_orders(rpc, account_name)))
    if not orders:
        print("\n\nThere are no open orders to cancel\n\n")
        return
    print(orders, "\n\n")
    # guard clause: bail out unless the user explicitly confirms
    if input("proceed to cancel these orders? y/n ").lower() != "y":
        return
    wif = getpass("\n Bitshares" + it("yellow", " AGENT WIF:\n "))
    print(" *****************")
    time.sleep(2)
    order = {
        "edicts": [{"op": "cancel", "ids": orders}],
        "header": {
            "asset_id": "1.3.1",  # placeholder
            "currency_id": "1.3.1",  # placeholder
            "asset_precision": 5,  # placeholder
            "currency_precision": 5,  # placeholder
            "account_id": account_id,
            "account_name": account_name,
            "wif": wif,
        },
        "nodes": nodes,
    }
    broker(order)
def print_logo():
    """
    +===============================+
      ╦ ╦ ╔═╗ ╔╗╔ ╔═╗ ╔═╗ ╔╦╗
      ╠═╣ ║ ║ ║║║ ║╣  ╚═╗  ║
      ╩ ╩ ╚═╝ ╝╚╝ ╚═╝ ╚═╝  ╩
       MARKET - PEGGED - ASSETS
    +===============================+
    """
    # NOTE: the docstring above IS the logo; it is printed verbatim below,
    # so any edit to the docstring changes program output
    print(it("green", print_logo.__doc__))
def api_server():
    """
    spawn a run forever api server instance and add routing information
    """
    # single-route application: all deposit requests hit /gateway
    app = App()
    app.add_route("/gateway", GatewayDepositServer())
    print(it("red", "INITIALIZING DEPOSIT SERVER"))
    print("serving http at")
    # print this host's ip address(es); NOTE `hostname -I` is linux-specific
    call(["hostname", "-I"])
    # bind all interfaces on port 8000 and serve until killed
    # (make_server is presumably wsgiref.simple_server — confirm at the import site)
    with make_server("", 8000, app) as httpd:
        httpd.serve_forever()
def xrp_transfer(order):
    """ pretty wrap the asyncio xrp transfer """
    # FIXME log this event
    timestamp()
    line_number()
    # keep the private key out of the console log
    safe_order = {k: v for k, v in order.items() if k != "private"}
    print("\nORDER\n\n", safe_order, "\n")
    # drive the async transfer coroutine to completion on the current loop
    # NOTE(review): get_event_loop() is deprecated in recent Python — confirm version
    loop = asyncio.get_event_loop()
    event = loop.run_until_complete(xrp_transfer_execute(order))
    print(it("red", "XRP TRANSFERRED"))
    return event
def main():
    """ initialize final aggregation and publication event loop """
    print("\033c")
    print_logo()
    # gather a y/n style answer for each optional action
    trigger = {}
    prompts = (
        ("feed", "PUBLISH"),
        ("jsonbin", "JSONBIN"),
        ("sceletus", "SCELETUS"),
        ("cancel", "CANCEL"),
    )
    for key, verb in prompts:
        trigger[key] = input(
            "\n to " + verb + it("cyan", " y + Enter ") + "or Enter to skip\n\n "
        ).lower()
    name, wif = "", ""
    # signing credentials are only needed for on-chain actions
    if trigger["feed"].lower() == "y" or (trigger["sceletus"] == "y"):
        name = input("\n Bitshares" + it("yellow", " AGENT NAME:\n\n "))
        wif = getpass("\n Bitshares" + it("yellow", " AGENT WIF:\n "))
        print(" *****************")
    gather_data(name, wif, trigger)
def print_market(storage, cache):  # DONE
    """ pricefeed_dex header containing with cached values """
    print("\033c")
    print_logo()
    print("")
    # colorized access time and data latency figures
    access = it("purple", "%.5f" % storage["access"])
    latency = it("purple", "%.1f" % storage["data_latency"])
    print(ctime(), access, "read", latency, "data")
    print("==================================================")
    # one (symbol, id, precision) tuple per currency
    currencies = [
        (symbol, currency_id, cache["currency_precision"][symbol])
        for symbol, currency_id in cache["currency_id"].items()
    ]
    print("CURRENCIES: ", currencies)
    print("ASSET: ", cache["asset"], cache["asset_id"], cache["asset_precision"])
    print("==================================================")
    print("")
def recycler():
    """
    In a background process, check incoming accounts & move funds to outbound accounts.

    Every 60 seconds, for each supported network, sweep any balance above the
    network's dust threshold from each deposit (incoming) account into the
    primary outbound account at index 0, leaving the dust threshold behind.

    :run forever:
    """
    networks = ["eos", "xrp"]
    # per-network (dust threshold, balance getter, transfer function)
    handlers = {
        "eos": (NIL_EOS, eos_balance, eos_transfer),
        "xrp": (NIL_XRP, xrp_balance, xrp_transfer),
    }
    # fix: removed extraneous f-prefix from a string with no placeholders (F541)
    print(it("red", "INITIALIZING RECYCLER\n"), "networks:", networks, "\n")
    while True:
        for network in networks:
            nil, get_balance, transfer = handlers[network]
            # recycle gateway incoming transfers to the outbound account;
            # index 0 is the outbound account itself, so skip it
            for gate in GATE[network][1:]:
                balance = get_balance(gate["public"])
                if balance > nil:
                    timestamp()
                    line_number()
                    print(it("red", f"{network} RECYCLER"))
                    print(gate["public"], balance, "\n")
                    # finalize the order; keep the dust threshold in the account
                    order = {
                        "private": gate["private"],
                        "public": gate["public"],
                        "to": GATE[network][0]["public"],
                        "quantity": balance - nil,
                    }
                    # serialize, sign, and broadcast
                    print(transfer(order), "\n")
        time.sleep(60)
def choice():
    """
    Welcome and user input for stand alone listener app.

    The user may enter a single ID, a comma separated list of IDs,
    the letter "A" for all operations, or nothing for the demo set.

    :return list(selection): operation id type numbers chosen by the user
    """
    print("\033c")
    print(it("blue", block_ops_logo()))
    # fix: corrected user-facing typo "seperated" -> "separated"
    print(
        it(
            "green",
            """
        Enter an Operation ID to stream below
        you can also enter a comma separated list of ID's
        or Enter the letter "A" for All
        or press Enter for demo of Operations 0, 1, and 2
        """,
        ))
    operations = raw_operations()
    print_options(operations)
    # demo default: transfer, limit order create, limit order cancel
    selection = [0, 1, 2]
    user_select = input("\n\n")
    try:
        # if the user entered a single ID number
        selection = [int(user_select)]
    except Exception:
        pass
    try:
        if "," in user_select:
            # if the user entered a list of numbers, attempt json conversion
            selection = json_loads(
                '["' + user_select.replace(",", '","').replace(" ", "") + '"]')
            selection = [int(k) for k in selection]
    except Exception:
        pass
    if user_select.lower() == "a":
        selection = list(operations.keys())
    print("\033c")
    print(it("blue", block_ops_logo()))
    print(
        it(
            "green",
            "\n BitShares Block Operations Listener\n" +
            "\n operation(s) selected: \n",
        ))
    print(it("blue", " " + str(selection) + "\n"))
    for k in selection:
        print(" ", (operations[k]))
    print(it("green", "\n\n fetching latest irreversible block number...\n"))
    return selection
def main():
    """ primary event loop """

    def fresh_screen():
        # clear the terminal and redraw the logo between stages
        print("\033c")
        print_logo()

    fresh_screen()
    print(it("cyan", " presents: Gateway Sceletus"))
    gateway, do_sceletus, name, wif = user_input()
    while True:
        fresh_screen()
        if do_sceletus:
            print(it("cyan", "Cancelling ALL Open Orders..."))
            cancel_all_markets(name, wif)
        fresh_screen()
        print(it("cyan", "Gathering CEX Data..."))
        cex = cex_rates(gateway)
        fresh_screen()
        print(it("cyan", "Gathering DEX Data..."))
        dex = dex_rates(gateway)
        fresh_screen()
        print(it("cyan", "Gathering FOREX Data..."))
        forex = forex_rates()
        fresh_screen()
        print(it("cyan", "\n\nCEX"))
        print(cex)
        print(it("cyan", "\n\nDEX"))
        print(dex)
        print(it("cyan", "\n\nFOREX"))
        print(forex["medians"])
        # persist the gathered data to the IPC text pipe
        race_write("pricefeed_cex.txt", cex)
        race_write("pricefeed_forex.txt", forex)
        prices = create_prices(cex, dex, forex)
        time.sleep(10)
        sceletus(prices, gateway, name, wif, do_sceletus)
        time.sleep(REFRESH - 100)
def main():
    """
    setting state of all inbound accounts to available
    subprocess auto send all inbound funds to outbound account
    subprocess deposit api server
    subprocess bitshares withdrawal listener
    """
    print("\033c\n")
    print(it("yellow", logo()))
    # initialize the pipe folder of *txt files
    json_ipc(initialize=True)
    # set state machine to "all incoming accounts available"
    for network in ("xrp", "eos"):
        initialize_addresses(network)
    # spawn 3 concurrent gateway subprocesses, pausing briefly between spawns
    recycling()
    time.sleep(0.2)
    deposit_api_server()
    time.sleep(0.2)
    withdraw_listener_bitshares()
def listener_bitshares(selection=None):
    """
    primary listener event loop

    Gathers the consensus irreversible block number from several maven
    subprocesses, fetches each new block from several mavens, takes the
    statistical mode of their answers, then dispatches every operation in
    every transaction of every new block to the `act` callback.

    :param int(selection) or None: user choice for demonstration of listener
    :run forever:
    """
    # get node list from github repo for bitshares ui staging; write to file
    nodes = bitshares_nodes()
    options = raw_operations()
    json_ipc(doc="nodes.txt", text=json_dumps(nodes))
    # create a subfolder for the database; write to file
    create_database()
    # initialize block number
    last_block_num = curr_block_num = 0
    # bypass user input... gateway transfer ops
    act = print_op
    if selection is None:
        selection = 0
        act = withdraw  # production path: handle gateway withdrawals
    # spawn subprocesses for gathering streaming consensus irreversible block number
    spawn_block_num_processes()
    # continually listen for last block["transaction"]["operations"]
    print(it("red", "\nINITIALIZING WITHDRAWAL LISTENER\n\n"))
    while True:
        try:
            # get the irreversible block number reported by each maven subprocess
            block_numbers = []
            for maven_id in range(BLOCK_NUM_MAVENS):
                block_num = json_ipc(doc=f"block_num_maven_{maven_id}.txt")[0]
                block_numbers.append(block_num)
            # the current block number is the statistical mode of the mavens
            # NOTE: may throw StatisticsError when no mode
            curr_block_num = mode(block_numbers)
            # print(curr_block_num)
            # NOTE(review): the f-prefix below is unnecessary (no placeholders)
            json_ipc(doc=f"block_number.txt", text=json_dumps([
                curr_block_num,
            ]))
            # if the irreverisble block number has advanced
            if curr_block_num > last_block_num:
                print(
                    "\033[F",  # go back one line
                    it("blue", "BitShares Irreversible Block"),
                    it("yellow", curr_block_num),
                    time.ctime()[11:19],
                    it("blue", int(time.time())),
                )
                if last_block_num > 0:  # not on first iter
                    # spawn some new mavens to get prospective block data
                    start = last_block_num + 1
                    stop = curr_block_num + 1
                    spawn_block_processes(start, stop)
                    # inititialize blocks as a dict of empty transaction lists
                    blocks = {}
                    for block_num in range(start, stop):
                        blocks[block_num] = []
                    # get block transactions from each maven subprocesses
                    for maven_id in range(BLOCK_MAVENS):
                        # print(maven_id)
                        maven_blocks = json_ipc(doc=f"block_maven_{maven_id}.txt")
                        # for each block that has past since last update
                        for block_num in range(start, stop):
                            # print(block_num)
                            # get the maven's version of that block from the dictionary
                            # NOTE: may throw KeyError, TODO: find out why?
                            maven_block = maven_blocks[str(block_num)]
                            # append that version to the list
                            # of maven opinions for that block number
                            # (serialized so mode() can hash/compare them)
                            blocks[block_num].append(json_dumps(maven_block))
                    # get the mode of the mavens for each block in the blocks dict
                    # NOTE: may throw StatisticsError when no mode
                    # for example half the nodes are on the next block number
                    blocks = {k: json_loads(mode(v)) for k, v in blocks.items()}
                    # triple nested:
                    # for each operation, in each transaction, on each block
                    for block_num, transactions in blocks.items():
                        for item, trx in enumerate(transactions):
                            for op in trx["operations"]:
                                # add the block and transaction numbers to the operation
                                op[1]["block"] = block_num
                                op[1]["trx"] = item + 1
                                op[1]["operation"] = (op[0], options[op[0]])
                                # spin off withdrawal act so listener can continue
                                process = Process(target=act, args=(op, ))
                                process.start()
            last_block_num = curr_block_num
            time.sleep(6)
        # statistics and key errors can be safely ignored, restart loop
        except (StatisticsError, KeyError):
            continue
        # in all other cases provide stack trace
        except Exception as error:
            print("\n\n", it("yellow", error), "\n\n")
            print(traceback.format_exc(), "\n")
            continue
def thresh(storage, process, epoch, pid, cache):  # DONE
    """
    Make calls for data, shake out any errors

    There are 20 threshing process running in parallel
    They are each periodically terminated and respawned

    :param dict(storage): process-local scratch space for latency statistics
    :param int(process): this thresher's index among the parallel processes
    :param int(epoch): respawn generation counter
    :param int(pid): operating system process id
    :param dict(cache): static market data (ids, precisions, begin time)
    :run forever:
    """
    # rolling 0/1 samples used to estimate per-call-type rejection rates
    handshake_bs = []
    ping_bs = []
    block_bs = []
    reject_bs = []
    storage["access"] = 0
    storage["data_latency"] = 0
    while True:
        storage["mean_ping"] = 0.5
        try:
            nodes = get_nodes()
            static_nodes = public_nodes()
            shuffle(nodes)
            node = nodes[0]
            storage["bw_depth"] = max(int(len(nodes) / 6), 1)
            # CHECK BLACK AND WHITE LISTS
            black = race_read(doc="blacklist.txt")[-storage["bw_depth"]:]
            white = race_read(doc="whitelist.txt")[-storage["bw_depth"]:]
            try:
                start = time()
                metanode = bitshares_trustless_client()
                storage["access"] = time() - start
                ping = storage["mean_ping"] = metanode["ping"]
                blacklist = metanode["blacklist"][-storage["bw_depth"]:]
                whitelist = metanode["whitelist"][-storage["bw_depth"]:]
                blocktime = metanode["blocktime"]
                storage["data_latency"] = time() - blocktime
                del metanode
                # prefer whichever list (file vs metanode) is longer
                if len(blacklist) > len(black):
                    black = blacklist
                    race_write("blacklist.txt", json_dump(black))
                if len(whitelist) > len(white):
                    white = whitelist
                    race_write("whitelist.txt", json_dump(white))
            except BaseException:
                pass
            # skip nodes already classified this cycle
            if node in black:
                raise ValueError("blacklisted")
            if node in white:
                raise ValueError("whitelisted")
            # connect to websocket
            rpc, handshake_latency, handshake_max = wss_handshake(storage, node)
            # use each node several times
            utilizations = UTILIZATIONS
            if (time() - cache["begin"]) < 100:
                utilizations = 1
            for util in range(utilizations):
                sleep(THRESH_PAUSE)
                # Database calls w/ data validations
                ping_latency, ping_max = rpc_ping_latency(rpc, storage)
                block_latency, block_max, blocktime = rpc_block_latency(rpc, storage)
                set_timing = " " + "speed/max/ratio/cause/rate"
                if handshake_max == 5:
                    set_timing = " " + it("cyan", "RESOLVING MEAN NETWORK SPEED")
                # timing analysis for development
                ping_r = ping_latency / ping_max
                block_r = block_latency / block_max
                handshake_r = handshake_latency / handshake_max
                # each *_b flag is 1 when the latency ratio reached its max, else 0
                ping_b = int(bool(int(ping_r)))
                block_b = int(bool(int(block_r)))
                handshake_b = int(bool(int(handshake_r)))
                reject_b = int(bool(ping_b + block_b + handshake_b))
                ping_bs.append(ping_b)
                block_bs.append(block_b)
                reject_bs.append(reject_b)
                handshake_bs.append(handshake_b)
                # keep only the most recent 100 samples
                ping_bs = ping_bs[-100:]
                block_bs = block_bs[-100:]
                reject_bs = reject_bs[-100:]
                handshake_bs = handshake_bs[-100:]
                ping_p = sum(ping_bs) / max(1, len(ping_bs))
                block_p = sum(block_bs) / max(1, len(block_bs))
                reject_p = sum(reject_bs) / max(1, len(reject_bs))
                handshake_p = sum(handshake_bs) / max(1, len(handshake_bs))
                ping_b = str(ping_b).ljust(7)
                block_b = str(block_b).ljust(7)
                handshake_b = str(handshake_b).ljust(7)
                reject = "".ljust(7)
                if reject_b:
                    reject = it("cyan", "X".ljust(7))
                optimizing = it("cyan", "OPTIMIZING".ljust(7))
                if (time() - cache["begin"]) > 200:
                    optimizing = "".ljust(7)
                # last, history, orderbook, balances, orders
                last = rpc_last(rpc, cache)
                # print(last)
                now = to_iso_date(time())
                then = to_iso_date(time() - 3 * 86400)
                ids = [cache["asset_id"], cache["currency_id"]]
                precisions = [cache["asset_precision"], cache["currency_precision"]]
                # CPU, RAM, io_count data REQUIRES MODULE INSTALL
                try:
                    proc = psutil_Process()
                    descriptors = proc.num_fds()
                    usage = ("grep 'cpu ' /proc/stat | awk " +
                             "'{usage=($2+$4)*100/($2+$4+$5)}" +
                             " END {print usage }' ")
                    cpu = "%.3f" % (float(popen(usage).readline()))
                    ram = "%.3f" % (100 * float(proc.memory_percent()))
                    io_count = list(proc.io_counters())[:2]
                except Exception as error:
                    if DEV:
                        print(trace(error))
                metanode = bitshares_trustless_client()
                m_last = {}
                ping = 0.5
                keys = ["bifurcating the metanode...."]
                try:
                    keys = metanode["keys"]
                    ping = storage["mean_ping"] = metanode["ping"]
                    m_last = metanode["last"]
                except BaseException:
                    pass
                del metanode
                try:
                    cex = race_read("pricefeed_cex.txt")
                    forex = race_read("pricefeed_forex.txt")
                    final = race_read("pricefeed_final.txt")
                except:
                    pass
                # aggregate gateway:bts data
                usds = []
                btcs = []
                usd_dict = {}
                btc_dict = {}
                try:
                    for key, val in m_last.items():
                        if "BTC" in key:
                            btcs.append(val)
                            btc_dict[key] = val
                        elif "USD" in key:
                            usds.append(val)
                            usd_dict[key] = val
                    # eliminate outliers
                    usd = median(usds)
                    btc = median(btcs)
                    # calculate the gateway btcusd for reference only
                    implied_btcusd = usd / btc
                except:
                    print(it("cyan", "WARN: GATHERING PRICES"))
                sceletus_output = {}
                try:
                    sceletus_output = race_read(doc="sceletus_output.txt")
                except:
                    pass
                try:
                    honest_cross_rates = race_read(doc="honest_cross_rates.txt")
                except:
                    pass
                runtime = int(time()) - cache["begin"]
                # storage['bw_depth'] = max(int(len(nodes) / 6), 1)
                if (len(white) < storage["bw_depth"]) or (len(black) <
                                                          storage["bw_depth"]):
                    alert = it("cyan", " BUILDING BLACK AND WHITE LISTS")
                else:
                    alert = ""
                if nodes == static_nodes:
                    alert += " ::WARN:: USING STATIC NODE LIST"
                upm = 0
                try:
                    upm = len(storage["updates"])
                except:
                    pass
                # in the event data passes all tests, then:
                # print, winnow the node, and nascent trend the maven
                print_market(storage, cache)
                print(keys)
                print("")
                print("runtime:epoch:pid:upm", it("green", runtime), epoch, pid, upm)
                try:
                    print("fds:processes ", descriptors, process, "of", PROCESSES)
                except BaseException:
                    print("processes: ", process, "of", PROCESSES)
                try:
                    print("cpu:ram:io_count ", cpu, ram, io_count)
                except BaseException:
                    pass
                print("utilization:node ", str(util + 1).ljust(3), node)
                print(
                    "total:white:black ",
                    len(static_nodes),
                    len(nodes),
                    len(white),
                    len(black),
                    alert,
                )
                print(set_timing)
                print(
                    "block latency ",
                    "%.2f %.1f %.1f %s %.2f" %
                    (block_latency, block_max, block_r, block_b, block_p),
                )
                print(
                    "handshake ",
                    "%.2f %.1f %.1f %s %.2f" % (
                        handshake_latency,
                        handshake_max,
                        handshake_r,
                        handshake_b,
                        handshake_p,
                    ),
                )
                print(
                    "ping ",
                    "%.2f %.1f %.1f %s %.2f" %
                    (ping_latency, ping_max, ping_r, ping_b, ping_p),
                )
                print(
                    "mean ping ",
                    (it("purple", ("%.3f" % ping))),
                    " %s %.2f" % (reject, reject_p),
                    optimizing,
                )
                print("")
                try:
                    print(
                        "DEX BTS:BTC",
                        it("cyan", ("%.16f" % btc)),
                        it("purple", btc_dict),
                    )
                    # json_dump(btc_dict, indent=0, sort_keys=True)))
                    print(
                        "DEX BTS:USD",
                        "%.16f" % usd,
                        it("purple", usd_dict),
                    )
                    print(
                        "DEX BTC:USD",
                        it("yellow", ("%.4f" % implied_btcusd)),
                        "(IMPLIED)",
                    )
                except:
                    pass
                try:
                    for key, val in cex.items():
                        print(
                            "CEX",
                            key,
                            it("cyan", ("%.8f" % val["median"])),
                            {k: ("%.8f" % v["last"])
                             for k, v in val["data"].items()},
                        )
                    print("\nFOREX " +
                          "inverse ::: pair ::: min ::: mid ::: max ::: qty ::: source")
                    for key, val in forex["medians"].items():
                        fxdata = [i[0] for i in forex["aggregate"][key]]
                        print(
                            it("cyan", str(sigfig(1 / val[0])).rjust(12)),
                            it("green", key),
                            str(min(fxdata)).ljust(11),
                            it("cyan", str(val[0]).ljust(11)),
                            str(max(fxdata)).ljust(11),
                            len(fxdata),
                            " ".join([i[1][:4] for i in forex["aggregate"][key]]),
                        )
                    print(
                        "FINAL INVERSE",
                        {k: sigfig(v) for k, v in final["inverse"].items()},
                    )
                    print("FINAL FEED", it("green", final["feed"]))
                    print("FEED CLOCK", it("yellow", final["time"]))
                    try:
                        print("HONEST CROSS RATES", honest_cross_rates)
                    except:
                        pass
                    try:
                        print(
                            "SCELETUS ",
                            it("purple", sceletus_output),
                        )
                    except:
                        pass
                    stale = time() - final["time"]["unix"]
                    if stale > 4000:
                        print(
                            it("red",
                               f"WARNING YOUR FEED IS STALE BY {stale} SECONDS"))
                except:
                    pass
                # send the maven dictionary to nascent_trend()
                # Must be JSON type
                # 'STRING', 'INT', 'FLOAT', '{DICT}', or '[LIST]'
                maven = {}
                maven["ping"] = (19 * storage["mean_ping"] + ping_latency) / 20  # FLOAT
                maven["last"] = last  # precision() STRING
                maven["whitelist"] = white  # LIST
                maven["blacklist"] = black  # LIST
                maven["blocktime"] = blocktime  # INT
                nascent_trend(maven)
                # winnow this node to the whitelist
                winnow(storage, "whitelist", node)
                # clear namespace
                del maven
                del last
                del io_count
                del alert
                del ram
                del cpu
                del keys
                del now
                del runtime
                del descriptors
                del proc
            # done using this node; close the socket and pick a new node
            try:
                sleep(0.0001)
                rpc.close()
            except Exception as error:
                if DEV:
                    print(trace(error))
            continue
        except Exception as error:
            # close the socket quietly, then log/blacklist as appropriate
            try:
                if DEV:
                    print(trace(error))
                sleep(0.0001)
                rpc.close()
            except BaseException:
                pass
            try:
                msg = trace(error) + node
                # common / expected failures are logged without a full stack trace
                if (("ValueError" not in msg) and ("StatisticsError" not in msg)
                        and ("result" not in msg) and ("timeout" not in msg)
                        and ("SSL" not in msg)):
                    if (("WebSocketTimeoutException" not in msg)
                            and ("WebSocketBadStatusException" not in msg)
                            and ("WebSocketAddressException" not in msg)
                            and ("ConnectionResetError" not in msg)
                            and ("ConnectionRefusedError" not in msg)):
                        msg += "\n" + str(format_exc())
                if DEV:  # or ((time() - cache["begin"]) > 60):
                    print(msg)
                if "listed" not in msg:
                    race_append(doc="metanodelog.txt", text=msg)
                winnow(storage, "blacklist", node)
                del msg
            except BaseException:
                pass
            continue
def sceletus(prices, gateway, name, wif, do_sceletus):
    """
    update the historic chart on the blockchain
    using buy/sell broker(order) method
    for each pair to be skeleton'd, for each agent on opposing sides of the book

    :param dict(prices): aggregated price data used to derive qty/rate per pair
    :param str(gateway): gateway name selecting which pairs to skeleton
    :param str(name): bitshares agent account name
    :param str(wif): bitshares agent private key
    :param do_sceletus: truthy for live execution; falsy for demo (no broker calls)
    :return tuple(orders, order_dict): log of intended ops and per-pair [qty, rate]
    """
    orders = []
    order_dict = {}
    header = {}
    # build qty_rate dict
    qty_rate = create_qty_rate(prices, gateway)
    print("\033c")
    print_logo()
    print("\n\nQTY_RATE")
    for k, v in qty_rate.items():
        print(k, v, it("cyan", sigfig(1 / v["rate"])))
    time.sleep(5)
    print(it("green", "Begin sceletus buy/sell ops..."))
    time.sleep(5)
    account_id = ""
    if do_sceletus:
        rpc = reconnect(None)
        account_id = rpc_lookup_accounts(rpc, {"account_name": name})
    # NOTE(review): `rpc` is only bound when do_sceletus is truthy, yet it is
    # used below in rpc_lookup_asset_symbols — confirm demo mode is exercised
    for pair in qty_rate.keys():
        amount = sigfig(qty_rate[pair]["qty"])
        price = sigfig(qty_rate[pair]["rate"])
        order_dict[pair] = [amount, price]
        # sort pair into dict of asset and currency
        asset = pair.split(":")[0]
        currency = pair.split(":")[1]
        pair_dict = {
            "asset": asset,
            "currency": currency,
        }
        # flash the curreny pair being bought/sold
        msg = "\033c\n\n\n"
        msg += it("red", "SCELETUS")
        msg += "\n\n\n"
        for _ in range(50):
            msg += ("\n " + asset + " : " + currency + " @ " +
                    it("cyan", price) + " qty " + it("yellow", amount))
        print(msg)
        # make rpc for A.B.C id's and precisions
        (
            asset_id,
            asset_precision,
            currency_id,
            currency_precision,
        ) = rpc_lookup_asset_symbols(rpc, pair_dict)
        # update the header respectively
        header["asset_id"] = asset_id
        header["currency_id"] = currency_id
        header["asset_precision"] = asset_precision
        header["currency_precision"] = currency_precision
        # perform buy ops for agents[0] and sell ops for agents[1]
        for idx in range(2):
            # build the header with correct account info
            header["account_name"] = name
            header["account_id"] = account_id
            header["wif"] = wif
            operation = ["buy", "sell"][idx]
            # build the final edict for this agent*pair with operation and qty_rate
            edict = {
                "op": operation,
                "amount": amount,
                "price": price,
                "expiration": 0,
            }
            # fetch the nodes list and shuffle
            nodes = race_read_json("nodes.txt")
            shuffle(nodes)
            # build the order with agent appropriate header and edict for this pair
            order = {
                "header": header,
                "edicts": [edict],
                "nodes": nodes,
            }
            # print the outcome and if not a demo, then live execution
            orders.append({
                "time": time.ctime(),
                "unix": int(time.time()),
                "name": name,
                "op": operation,
                "pair": pair,
                "price": price,
                "amount": amount,
            })
            if do_sceletus:
                broker(order)
    if do_sceletus:
        print("\033c")
        print_logo()
        print(it("cyan", "OPEN ORDERS"))
        rpc_open_orders(rpc, name)
    return orders, order_dict
def withdraw(op): """ in production print_op is replaced with withdraw The user has returned some UIA to the issuer! upon hearing an on chain UIA transfer to the gateway with memo from this definition we trigger a gateway withdrawal event release the user's foreign chain funds to the memo and burn the returned UIA upon irreversible receipt """ # if its a transfer to gateway with a memo tgm = False if op[0] == 0: # transfer if op[1]["to"] in [ GATE["uia"]["eos"]["issuer_id"], GATE["uia"]["xrp"]["issuer_id"], ]: print(it("yellow", "gate uia transfer")) if "memo" in op[1].keys(): print( it("red", "TRANSFER TO GATEWAY WITH MEMO\n\n"), it("yellow", op), "\n", ) tgm = True else: print(it("red", "WARN: transfer to gateway WITHOUT memo")) if tgm: timestamp() line_number() order = {} # extract the asset_id of the transfer uia_id = op[1]["amount"]["asset_id"] print("uia_id", uia_id, "\n") # EOS specific parameters if uia_id == GATE["uia"]["eos"]["asset_id"]: network = "eos" verify = verify_eosio_account listen = listener_eosio transfer = eos_transfer # eos transfers require a url order["url"] = eosio_nodes()[ 0] # FIXME what happens if node fails? 
# XRP specific parameters elif uia_id == GATE["uia"]["xrp"]["asset_id"]: network = "xrp" verify = verify_ripple_account listen = listener_ripple transfer = xrp_transfer memo = op[1][ "memo"] # dict with keys("from", "to", "nonce", "message") order["private"] = GATE[network][0]["private"] order["public"] = GATE[network][0]["public"] # convert graphene operation amount to human readable order["quantity"] = (op[1]["amount"]["amount"] / 10**GATE["uia"][network]["asset_precision"]) # decode the client's memo using the issuers private key order["to"] = ovaltine(memo, GATE["uia"][network]["issuer_private"]) print(f"decoded {network} client", order["to"], "\n") # confirm we're dealing with a legit client address if verify(order["to"]): listener = Process( target=listen, args=( 0, order["quantity"], "reserve", # issuer_action None, # # always None for reserve ), ) # upon hearing real foreign chain transfer, reserve the uia equal listener.start() print( it( "red", f"STARTING {network} LISTENER TO RESERVE {order['quantity']}\n", )) # wait for listener subprocess to warm up then transfer the order time.sleep(30) timestamp() line_number() print(transfer(order)) else: print( it("red", f"WARN: memo is NOT a valid {network} account name\n"))
def gather_data(name, wif, trigger):
    """
    primary event loop

    :param str(name): bitshares agent account name
    :param str(wif): bitshares agent private key
    :param dict(trigger): "y"/"" flags for keys feed, jsonbin, sceletus, cancel
    :run forever:
    """
    # purge the IPC text pipe
    race_write("pricefeed_final.txt", {})
    race_write("pricefeed_forex.txt", {})
    race_write("pricefeed_cex.txt", {})
    race_write("pricefeed_dex.txt", {})
    race_write("sceletus_output.txt", [])
    race_write("honest_cross_rates.txt", {})
    race_write("feed.txt", {})
    # begin the dex pricefeed (metanode fork)
    dex_process = Process(target=pricefeed_dex)
    dex_process.daemon = False
    dex_process.start()
    # dex_process.join(10)
    dex = {}
    # wait until the first dex pricefeed writes to file
    while dex == {}:
        dex = race_read_json("pricefeed_dex.txt")
    updates = 1
    while True:
        try:
            # collect forex and cex data
            forex = pricefeed_forex()  # takes about 30 seconds
            cex = pricefeed_cex()  # takes about 30 seconds
            # read the latest dex data
            dex = race_read_json("pricefeed_dex.txt")
            # localize forex rates
            usdcny = forex["medians"]["USD:CNY"][0]
            # usdeur = forex["medians"]["USD:EUR"][0]
            # usdgbp = forex["medians"]["USD:GBP"][0]
            # usdrub = forex["medians"]["USD:RUB"][0]
            # usdjpy = forex["medians"]["USD:JPY"][0]
            # usdkrw = forex["medians"]["USD:KRW"][0]
            usdxau = forex["medians"]["USD:XAU"][0]
            usdxag = forex["medians"]["USD:XAG"][0]
            # localize cex rates
            btcusd = cex["BTC:USD"]["median"]
            cex_btsbtc = cex["BTS:BTC"]["median"]
            cex_btsbtc_list = []
            for key, val in cex["BTS:BTC"]["data"].items():
                cex_btsbtc_list.append(val["last"])
            # attain dex BTS:BTC median
            dex_btsbtc_list = [v for k, v in dex["last"].items() if "BTC" in k]
            dex_btsbtc = median(dex_btsbtc_list)
            # finalize btsbtc by taking median of all cex and dex btsbtc prices
            btsbtc = median(dex_btsbtc_list + cex_btsbtc_list)
            # create feed prices for crypto altcoins: LTC, ETH, XRP
            # btcltc = 1/cex["LTC:BTC"]["median"]
            btceth = 1 / cex["ETH:BTC"]["median"]
            btcxrp = 1 / cex["XRP:BTC"]["median"]
            # btsltc = btsbtc * btcltc
            btseth = btsbtc * btceth
            btsxrp = btsbtc * btcxrp
            # create implied bts us dollar price
            btsusd = btsbtc * btcusd
            # create implied bts priced in forex terms
            feed = {
                "BTC:ETH": btceth,
                "BTC:XRP": btcxrp,
                "BTS:ETH": btseth,
                "BTS:XRP": btsxrp,
                "BTS:BTC": btsbtc,
                "BTS:USD": btsusd,
                "BTS:CNY": (btsusd * usdcny),
                # "BTS:EUR": (btsusd * usdeur),
                # "BTS:GBP": (btsusd * usdgbp),
                # "BTS:RUB": (btsusd * usdrub),
                # "BTS:JPY": (btsusd * usdjpy),
                # "BTS:KRW": (btsusd * usdkrw),
                "BTS:XAU": (btsusd * usdxau),
                "BTS:XAG": (btsusd * usdxag),
            }
            feed = {k: sigfig(v) for k, v in feed.items()}
            # forex priced in bts terms; switch symbol and 1/price
            inverse_feed = {(k[-3:] + ":" + k[:3]): sigfig(1 / v)
                            for k, v in feed.items()}
            # aggregate full price calculation for jsonbin.io
            current_time = {
                "unix": int(time.time()),
                "local": time.ctime() + " " + time.strftime("%Z"),
                "utc": time.asctime(time.gmtime()) + " UTC",
                "runtime": int(time.time() - BEGIN),
                "updates": updates,
            }
            prices = {
                "time": current_time,
                "cex": cex,
                "dex": dex,
                "forex": forex,
                "inverse": inverse_feed,
                "feed": feed,
            }
            # update final output on disk
            race_write(doc="feed.txt", text=feed)
            race_write(doc="pricefeed_final.txt", text=json_dumps(prices))
            # publish feed prices to the blockchain
            if trigger["feed"] == "y":
                time.sleep(3)
                print("\n", it("red", "PUBLISHING TO BLOCKCHAIN"))
                time.sleep(5)
                publish_feed(prices, name, wif)
            # upload production data matrix to jsonbin.io
            if trigger["jsonbin"] == "y":
                time.sleep(3)
                print("\n", it("red", "UPLOADING TO JSONBIN"))
                time.sleep(5)
                update_jsonbin(prices)
            # buy/sell reference rates with two accounts
            msg = "DEMO SCELETUS REFERENCE RATES"
            if trigger["sceletus"] == "y":
                if trigger["cancel"] == "y":
                    time.sleep(3)
                    print("\n", it("red", "CANCEL ALL IN ALL MARKETS"))
                    time.sleep(5)
                    cancel_all_markets(name, wif)
                # live mode drops the DEMO prefix from the banner
                msg = msg.replace("DEMO ", "")
            time.sleep(3)
            print("\n", it("red", msg))
            time.sleep(5)
            # NOTE(review): sceletus() is called here with four positional
            # arguments; a five-parameter sceletus(prices, gateway, name, wif,
            # do_sceletus) exists elsewhere in this codebase — confirm which
            # definition is in scope for this module
            sceletus_orders, sceletus_output = sceletus(
                prices, name, wif, trigger["sceletus"]
            )
            race_append("sceletus_orders.txt", ("\n\n" + json_dumps(sceletus_orders)))
            race_write("sceletus_output.txt", json_dumps(sceletus_output))
            # append the human readable feed history log
            appendage = ("\n" + str(int(time.time())) + " " + time.ctime() + " " +
                         str(feed))
            race_append(doc="feed_append.txt", text=appendage)
            updates += 1
            time.sleep(REFRESH)
        except Exception as error:
            print(error)
            time.sleep(10)  # try again in 10 seconds
def listener_eosio(
    account_idx=0, amount=None, issuer_action=None, client_id=None, nonce=0
):
    """
    Listen on the EOSIO chain for a gateway transfer and act on it.

    For every irreversible block from initialization until detection (or
    timeout), scan all "eosio.token" transfer actions for one involving the
    gateway address; issue UIA to the client upon deposit, or reserve UIA
    upon hearing proof of an outbound transfer.

    :param int(account_idx) # from gateway_state.py
    :param float(amount)    # expected amount (reserve flow only)
    :param str(issuer_action) # "issue", "reserve"; None in unit test case
    :param str(client_id) #1.2.X
    :param int(nonce)     # millesecond id tagging this listener's log lines
    :return None:
    """
    gateway = GATE["eos"][account_idx]["public"]
    uia = GATE["uia"]["eos"]["asset_name"]
    start_block_num = get_irreversible_block()
    checked_blocks = [start_block_num]
    print("Start Block:", start_block_num, "\n")
    # block["transactions"][0]["trx"]["transaction"]["actions"][0] holds:
    # ["name"] # str("transfer") etc.
    # ["data"] # dict.keys() [to, from, quantity]
    #
    # FIX: create ONE multiprocessing Manager for the whole listener; the
    # previous version built a new Manager() (each spawning its own server
    # process) for every block fetched, leaking processes under load
    manager = Manager()
    start = time.time()
    while 1:
        elapsed = time.time() - start
        if elapsed > DEPOSIT_TIMEOUT:
            print(
                f"nonce {nonce}",
                it("red", f"{nonce} EOS GATEWAY TIMEOUT"),
                gateway,
                "\n",
            )
            # 10 minutes after timeout, release the address
            if issuer_action == "issue":
                unlock_address("eos", account_idx, DEPOSIT_PAUSE)
            break
        # get the latest irreversible block number
        current_block = get_irreversible_block()
        # get the latest block number we checked
        max_checked_block = max(checked_blocks)
        # if there are any new irreversible blocks
        if current_block > max_checked_block + 1:
            new_blocks = range(max_checked_block + 1, current_block)
            # eosio has a 0.5 second block time, to prevail over network latency:
            # *concurrently* fetch all new blocks
            block_processes = {}  # dictionary of multiprocessing "Process" events
            blocks_pipe = {}  # dictionary of multiprocessing "Value" pipes
            # spawn multiple processes to gather the "new" blocks
            for block_num in new_blocks:
                blocks_pipe[block_num] = manager.Value(c_wchar_p, "")
                block_processes[block_num] = Process(
                    target=get_block, args=(block_num, blocks_pipe,)
                )
                block_processes[block_num].start()
            # join all subprocess back to main process; wait for all to finish
            for block_num in new_blocks:
                block_processes[block_num].join()
            # extract the blocks from each "Value" in blocks_pipe
            blocks = {}
            for block_num, block in blocks_pipe.items():
                # create block number keyed dict of block data dicts
                blocks[block_num] = block.value
            # with new cache of blocks, check every block from last check till now
            for block_num in new_blocks:
                print(
                    f"nonce {nonce}",
                    it("purple", "Eosio Irreversible Block"),
                    it("yellow", block_num),
                    time.ctime()[11:19],
                    it("purple", int(time.time())),
                    "\n",
                )
                # get each new irreversible block
                block = blocks[block_num]
                transactions = []
                try:
                    transactions = block["transactions"]
                except Exception:
                    # best effort: malformed block payloads are simply skipped
                    pass
                # iterate through all transactions in the list of transactions
                for trx in transactions:
                    actions = []
                    try:
                        actions = trx["trx"]["transaction"]["actions"]
                    except Exception:
                        pass
                    # if there are any, iterate through the actions
                    for action in actions:
                        try:
                            # sort by transfer ops
                            if (
                                action["name"] == "transfer"
                                # SECURITY: ensure it is the correct contract!!!
                                and action["account"] == "eosio.token"
                            ):
                                # extract transfer op data
                                qty = action["data"]["quantity"]
                                trx_to = action["data"]["to"]
                                trx_from = action["data"]["from"]
                                trx_asset = qty.split(" ")[1].upper()
                                trx_amount = float(qty.split(" ")[0])
                                # sort again by > nil amount of eos
                                if trx_amount > 0.0001 and trx_asset == "EOS":
                                    # during unit testing
                                    # if issuer_action is None:
                                    if DEV:
                                        print(
                                            f"nonce {nonce}", block_num, action, "\n"
                                        )
                                    # if there are any transfers listed
                                    if gateway in [trx_from, trx_to]:
                                        timestamp()
                                        line_number()
                                        print(
                                            f"nonce {nonce}",
                                            it("red", "GATEWAY TRANSFER DETECTED\n"),
                                            f"amount {trx_amount} {trx_asset}\n",
                                            f"from {trx_from}\n",
                                            f"to {trx_to}\n",
                                        )
                                        # issue UIA to client_id
                                        # upon receipt of their foreign funds
                                        if (
                                            issuer_action == "issue"
                                            and trx_to == gateway
                                        ):
                                            print(
                                                f"nonce {nonce}",
                                                it(
                                                    "red",
                                                    f"ISSUING {trx_amount} {uia} to "
                                                    + f"{client_id}\n",
                                                ),
                                            )
                                            issue("eos", trx_amount, client_id)
                                            # unlock the deposit address after some time
                                            delay = (
                                                DEPOSIT_TIMEOUT
                                                - elapsed
                                                + DEPOSIT_PAUSE
                                            )
                                            unlock_address("eos", account_idx, delay)
                                            return
                                        # when returning foreign funds to client,
                                        # upon receipt, reserve equal in UIA
                                        if (
                                            issuer_action == "reserve"
                                            and trx_from == gateway
                                            and trx_amount == amount
                                        ):
                                            print(
                                                f"nonce {nonce}",
                                                it(
                                                    "red",
                                                    f"RESERVING {amount} {uia}\n"
                                                ),
                                            )
                                            reserve("eos", trx_amount)
                                            return
                        except Exception:
                            # log and continue; one bad action must not stop the scan
                            print(f"nonce {nonce}", "action", action, "\n")
                            print(traceback.format_exc(), "\n")
                if block_num not in checked_blocks:
                    checked_blocks.append(block_num)
def sceletus(prices, name, wif, do_sceletus):
    """
    update the historic chart on the blockchain
    using buy/sell broker(order) method
    for each pair to be skeleton'd, for each agent on opposing sides of the book

    :param dict(prices): aggregate price data; prices["time"]["updates"] is
        used as the tick counter
    :param str(name): BitShares account name placing the orders
    :param str(wif): private key for that account
    :param do_sceletus: truthy to actually broadcast via broker(); falsy for
        a demo run that only builds the orders list
    :return tuple(list(orders), dict(order_dict)): log entries for each
        buy/sell placed, and {pair: [amount, price]} for each pair
    """
    orders = []
    order_dict = {}
    # NOTE(review): this single header dict is mutated per pair/agent and
    # aliased into every order passed to broker(); safe only because broker()
    # is called before the next mutation — confirm before reordering
    header = {}
    tick = prices["time"]["updates"]
    # create a list of all pairs to be skeleton'd
    pairs = create_pairs(tick)
    # websocket handshake
    rpc = reconnect(None)
    # remote procedure price of bts to Bitassets rates
    bitassets = fetch_bts_bitassets(rpc)
    account_id = ""
    if do_sceletus:
        account_id = rpc_lookup_accounts(rpc, {"account_name": name})
    # build qty_rate dict
    qty_rate = create_qty_rate(prices, bitassets)
    # each agent will place a limit order for each market pair
    for pair in pairs:
        amount = sigfig(qty_rate[pair]["qty"])
        price = sigfig(qty_rate[pair]["rate"])
        order_dict[pair] = [amount, price]
        # sort pair into dict of asset and currency
        asset = pair.split(":")[0]
        currency = pair.split(":")[1]
        pair_dict = {
            "asset": asset,
            "currency": currency,
        }
        # flash the currency pair being bought/sold ("\033c" clears terminal)
        msg = "\033c\n\n\n"
        msg += it("red", "SCELETUS")
        msg += "\n\n\n"
        # repeat the banner line 50 times to fill the screen
        for i in range(50):
            msg += (
                "\n "
                + asset
                + " : "
                + currency
                + " @ "
                + it("cyan", price)
                + " qty "
                + it("yellow", amount)
            )
        print(msg)
        # make rpc for A.B.C id's and precisions
        (
            asset_id,
            asset_precision,
            currency_id,
            currency_precision,
        ) = rpc_lookup_asset_symbols(rpc, pair_dict)
        # update the header respectively
        header["asset_id"] = asset_id
        header["currency_id"] = currency_id
        header["asset_precision"] = asset_precision
        header["currency_precision"] = currency_precision
        # perform buy ops for agents[0] and sell ops for agents[1]
        for idx in range(2):
            # build the header with correct account info
            header["account_name"] = name
            header["account_id"] = account_id
            header["wif"] = wif
            operation = ["buy", "sell"][idx]
            # build the final edict for this agent*pair with operation and qty_rate
            edict = {
                "op": operation,
                "amount": amount,
                "price": price,
                "expiration": 0,
            }
            # fetch the nodes list and shuffle
            nodes = race_read_json("nodes.txt")
            random.shuffle(nodes)
            # build the order with agent appropriate header and edict for this pair
            order = {
                "header": header,
                "edicts": [edict],
                "nodes": nodes,
            }
            # print the outcome and if not a demo, then live execution
            orders.append({
                "time": time.ctime(),
                "unix": int(time.time()),
                "tick": tick,
                "name": name,
                "op": operation,
                "pair": pair,
                "price": price,
                "amount": amount,
            })
            if do_sceletus:
                broker(order)
    return orders, order_dict
def on_get(self, req, resp):
    """
    When there is a get request made to the deposit server api

    User GET request includes the client_id's BitShares account_name
    Select a gateway wallet from list currently available; remove it from the list
    the available address list will be stored in a json_ipc text pipe
    Server RESPONSE is deposit address and timeout
    After timeout or deposit return address to text pipe list

    :param req: falcon-style request; req.params must contain
        "client_id" and "uia_name"
    :param resp: falcon-style response; body and status are set here
    :return None: response is delivered via resp
    """
    # approximate confirmation time per network, in minutes (used in the
    # human-readable response message only)
    confirm_time = {
        "eos": 30,
        "xrp": 2,
    }
    # create a millesecond nonce to log this event
    nonce = milleseconds()
    # extract the incoming parameters to a dictionary
    data = dict(req.params)
    timestamp()
    line_number()
    print(it("red", "DEPOSIT SERVER RECEIVED A GET REQUEST"), "\n")
    call(["hostname", "-I"])
    print(data, "\n")
    client_id = data["client_id"]
    uia = data["uia_name"]
    # translate the incoming uia request to the appropriate network
    network = ""
    if uia == GATE["uia"]["xrp"]["asset_name"]:
        network = "xrp"
    elif uia == GATE["uia"]["eos"]["asset_name"]:
        network = "eos"
    print("network", network, "\n")
    if network in ["xrp", "eos"]:
        # lock an address until this transaction is complete
        gateway_idx = lock_address(network)
        print("gateway index", gateway_idx, "\n")
        # gateway_idx is None when every address is already locked
        if gateway_idx is not None:
            timestamp()
            line_number()
            deposit_address = GATE[network][gateway_idx]["public"]
            print("gateway address", deposit_address, "\n")
            # format a response json
            msg = json_dumps({
                "response": "success",
                "server_time": nonce,
                "deposit_address": deposit_address,
                "gateway_timeout": "30 MINUTES",
                "msg": (
                    f"Welcome {client_id}, please deposit your gateway issued "
                    + f"{network} asset, to the {uia} gateway 'deposit_address' "
                    + "in this response. Make ONE transfer to this address, "
                    + "within the gateway_timeout specified. Transactions on "
                    + f"this network take about {confirm_time[network]} "
                    + "minutes to confirm."
                ),
            })
            print(
                it("red", f"STARTING {network} LISTENER TO ISSUE to {client_id}"),
                "\n",
            )
            # dispatch the appropriate listener protocol
            listen = {"eos": listener_eosio, "xrp": listener_ripple}
            # in subprocess listen for payment from client_id to gateway[idx]
            # upon receipt issue asset, else timeout
            listener = Process(
                target=listen[network],
                args=(gateway_idx, None, "issue", client_id, nonce),
            )
            listener.start()
            print(f"{network}listener started", "\n")
        else:
            msg = json_dumps({
                "response": "error",
                "server_time": nonce,
                "msg": f"all {uia} gateway addresses are in use, "
                + "please try again later",
            })
    else:
        msg = json_dumps({
            "response": "error",
            "server_time": nonce,
            "msg": f"{uia} is an invalid gateway UIA, please try again",
        })
    # log the response and build the response body with a data dictionary
    doc = str(nonce) + "_" + uia + "_" + client_id + ".txt"
    json_ipc(doc=doc, text=msg)
    # allow some time for listener to start before offering address
    time.sleep(5)
    print(msg, "\n")
    resp.body = msg
    resp.status = HTTP_200
def listener_ripple(account_idx=0, amount=None, issuer_action=None, client_id=None,
                    nonce=0):
    """
    Listen on the Ripple ledger for a gateway transfer and act on it.

    For every validated ledger from initialization until detection (or
    timeout), scan transactions for an XRP transfer involving the gateway
    address; issue UIA to the client upon deposit, or reserve UIA upon
    hearing proof of an outbound transfer.

    :param int(account_idx) # from gateway_state.py
    :param float(amount)    # expected amount (reserve flow only)
    :param str(issuer_action) # "issue", "reserve"; None in unit test case
    :param str(client_id) #1.2.X
    :param int(nonce)     # millesecond id tagging this listener's log lines
    :return None:
    """
    gateway = GATE["xrp"][account_idx]["public"]
    uia = GATE["uia"]["xrp"]["asset_name"]
    start_ledger_num = get_validated_ledger()
    checked_ledgers = [start_ledger_num]
    timestamp()
    line_number()
    print(f"nonce {nonce}", "Start ledger:", start_ledger_num, "\n")
    start = time.time()
    while 1:
        elapsed = time.time() - start
        if elapsed > DEPOSIT_TIMEOUT:
            print(f"nonce {nonce}", it("red", "XRP GATEWAY TIMEOUT"), gateway)
            # after timeout, release the address
            if issuer_action == "issue":
                # FIX: network key is "xrp" — matching lock_address(network)
                # and the issue branch below; the previous "ripple" key meant
                # the deposit address was never released on timeout
                unlock_address("xrp", account_idx, DEPOSIT_PAUSE)
            break
        # get the latest validated ledger number
        current_ledger = get_validated_ledger()
        # get the latest ledger number we checked
        max_checked_ledger = max(checked_ledgers)
        # if there are any new validated ledgers
        if current_ledger > max_checked_ledger + 1:
            # check every ledger from last check till now
            for ledger_num in range(max_checked_ledger + 1, current_ledger):
                print(
                    f"nonce {nonce}",
                    it("green", "Ripple Validated Ledger"),
                    it("yellow", ledger_num),
                    time.ctime()[11:19],
                )
                # get each new validated ledger
                transactions = get_ledger(ledger_num)
                # iterate through all transactions in the list of transactions
                for trx in transactions:
                    # a dict Amount is an issued-currency payment, not XRP
                    if not isinstance(trx["Amount"], dict):
                        # localize data from the transaction
                        amount_xrp = int(trx["Amount"]) / 10**6  # convert drops to xrp
                        trx_from = trx["Account"]
                        trx_to = trx["Destination"]
                        # during unit testing
                        if issuer_action is None:
                            print(f"nonce {nonce}", ledger_num, trx, "\n")
                        # determine if it is a transfer to or from the gateway
                        if gateway in [trx_from, trx_to]:
                            timestamp()
                            line_number()
                            # establish gateway transfer direction
                            direction = "INCOMING"
                            if gateway == trx_from:
                                direction = "OUTGOING"
                            print(
                                f"nonce {nonce}",
                                it(
                                    "red",
                                    f"{direction} XRP GATEWAY TRANSFER DETECTED\n",
                                ),
                                f"amount {amount_xrp}\n",
                                f"from {trx_from}\n",
                                f"to {trx_to}\n",
                            )
                            # the client_id was assigned deposit gateway address
                            # issue UIA to client_id upon receipt of their foreign funds
                            if issuer_action == "issue" and trx_to == gateway:
                                print(
                                    f"nonce {nonce}",
                                    it(
                                        "red",
                                        f"ISSUING {amount_xrp} {uia} to {client_id}\n",
                                    ),
                                )
                                issue("xrp", amount_xrp, client_id)
                                # in subprocess unlock the deposit address after wait
                                delay = DEPOSIT_TIMEOUT - elapsed + DEPOSIT_PAUSE
                                unlock_address("xrp", account_idx, delay)
                                return  # but immediately kill the listener
                            # the parent process will soon send foreign funds to client_id
                            # reserve UIA upon hearing proof of this transfer
                            if issuer_action == "reserve" and trx_from == gateway:
                                print(
                                    f"nonce {nonce}",
                                    it("red", f"RESERVING {amount_xrp} {uia}\n"),
                                )
                                reserve("xrp", amount_xrp)
                                return  # kill the listener
                # append this ledger number to the list of checked numbers
                if ledger_num not in checked_ledgers:
                    checked_ledgers.append(ledger_num)