def blocklast(socket):  # get last block
    """Ask the node for its latest block and print its height and hash.

    :param socket: open connection to the node
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "blocklast", 10)
    hash_last = connections.receive(socket, 10)
    print("Last block number: {}".format(hash_last[0]))
    print("Last block hash: {}".format(hash_last[1]))
def send_confirm(amount_input, recipient_input, keep_input, openfield_input):
    # Pop up a confirmation window summarising a pending transaction
    # (amount, computed fee, recipient, openfield) before it is sent.
    # Depending on the Tk checkbox vars, the openfield payload may be
    # encrypted (hybrid RSA+AES), base64-encoded, and/or prefixed as a message.
    top10 = Toplevel()
    top10.title("Confirm")
    # encr check
    if encrypt_var.get() == 1:
        #get recipient's public key
        connections.send(s, "pubkeyget", 10)
        connections.send(s, recipient_input, 10)
        target_public_key_hashed = connections.receive(s, 10)
        recipient_key = RSA.importKey(base64.b64decode(target_public_key_hashed).decode("utf-8"))
        #openfield_input = str(target_public_key.encrypt(openfield_input.encode("utf-8"), 32))
        data = openfield_input.encode("utf-8")
        # print (open("pubkey.der").read())
        # Hybrid scheme: random AES session key, RSA-OAEP wraps the key,
        # AES-EAX encrypts (and authenticates) the actual payload.
        session_key = get_random_bytes(16)
        cipher_aes = AES.new(session_key, AES.MODE_EAX)
        # Encrypt the session key with the public RSA key
        cipher_rsa = PKCS1_OAEP.new(recipient_key)
        # Encrypt the data with the AES session key
        ciphertext, tag = cipher_aes.encrypt_and_digest(data)
        enc_session_key = (cipher_rsa.encrypt(session_key))
        # Serialised as the repr of [nonce, tag, ciphertext, wrapped key]
        openfield_input = str([x for x in (cipher_aes.nonce, tag, ciphertext, enc_session_key)])
    # encr check
    # msg check
    if encode_var.get() == 1:
        openfield_input = base64.b64encode(openfield_input.encode("utf-8")).decode("utf-8")
    # msg check
    # Prefix markers consumed by the message-reading side:
    if msg_var.get() == 1 and encode_var.get() == 1:
        openfield_input = "bmsg=" + openfield_input
    if msg_var.get() == 1 and encode_var.get() == 0:
        openfield_input = "msg=" + openfield_input
    if encrypt_var.get() == 1:
        openfield_input = "enc=" + str(openfield_input)
    fee = fee_calculate(openfield_input, keep_var.get())
    confirmation_dialog = Text(top10, width=100)
    confirmation_dialog.insert(INSERT, ("Amount: {}\nFee: {}\nTotal: {}\nTo: {}\nKeep Entry: {}\nOpenField:\n\n{}".format(amount_input, fee, '%.8f' % (float(amount_input)+float(fee)), recipient_input, keep_input, openfield_input)))
    confirmation_dialog.grid(row=0, pady=0)
    # "Confirm" hands the (possibly transformed) payload to send_confirmed.
    enter = Button(top10, text="Confirm", command=lambda: send_confirmed(amount_input, recipient_input, keep_input, openfield_input, top10))
    enter.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
    done = Button(top10, text="Cancel", command=top10.destroy)
    done.grid(row=2, column=0, sticky=W + E, padx=15, pady=(5, 5))
def msg_received_get(addlist):
    """Render messages addressed to this wallet from a transaction list.

    Scans ``addlist`` for openfield payloads flagged as messages
    ("msg=", "bmsg=", "enc=msg=", "enc=bmsg=") whose recipient is the
    wallet ``address``, resolves the sender's alias via the node socket
    ``s``, decodes/decrypts the payload, and appends one display line to
    the ``msg_received`` text widget.

    :param addlist: list of transaction rows (openfield at index 11,
        sender at index 2, recipient at index 3, timestamp at index 1)
    """
    for x in addlist:
        if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[3] == address:
            # resolve the sender's alias for display
            connections.send(s, "aliasget", 10)
            connections.send(s, x[2], 10)
            msg_address = connections.receive(s, 10)[0][0]

            if x[11].startswith("enc=msg="):
                # FIX: the original used lstrip("enc=msg="), which strips a
                # *character set* and could eat leading payload characters;
                # slice the known prefix off instead.
                msg_received_digest = x[11][len("enc=msg="):]
                try:
                    # payload is the repr of (nonce, tag, ciphertext, wrapped key)
                    (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_received_digest)
                    private_key = RSA.import_key(open("privkey.der").read())
                    # Unwrap the AES session key with our private RSA key
                    cipher_rsa = PKCS1_OAEP.new(private_key)
                    session_key = cipher_rsa.decrypt(enc_session_key)
                    # Decrypt and authenticate the data with the AES session key
                    cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce)
                    msg_received_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8")
                except Exception:
                    # best effort: any parse/crypto failure shows a placeholder
                    msg_received_digest = "Could not decrypt message"

            elif x[11].startswith("enc=bmsg="):
                msg_received_digest = x[11][len("enc=bmsg="):]  # FIX: prefix slice, not lstrip
                try:
                    # base64 layer first, then the same hybrid decryption
                    msg_received_digest = base64.b64decode(msg_received_digest).decode("utf-8")
                    (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(msg_received_digest)
                    private_key = RSA.import_key(open("privkey.der").read())
                    cipher_rsa = PKCS1_OAEP.new(private_key)
                    session_key = cipher_rsa.decrypt(enc_session_key)
                    cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce)
                    msg_received_digest = cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8")
                except Exception:
                    msg_received_digest = "Could not decrypt message"

            elif x[11].startswith("bmsg="):
                msg_received_digest = x[11][len("bmsg="):]  # FIX: prefix slice, not lstrip
                try:
                    msg_received_digest = base64.b64decode(msg_received_digest).decode("utf-8")
                except Exception:
                    msg_received_digest = "Could not decode message"

            elif x[11].startswith("msg="):
                msg_received_digest = x[11][len("msg="):]  # FIX: prefix slice, not lstrip

            # FIX: strip the "alias=" prefix only when actually present
            # (lstrip would also eat leading a/l/i/s/= characters of a name).
            sender = msg_address[len("alias="):] if msg_address.startswith("alias=") else msg_address
            msg_received.insert(INSERT, ((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(float(x[1])))) + " From " + sender + ": " + msg_received_digest) + "\n")
def addlist(socket, arg1):  # get all txs for an address
    """Print every transaction the node knows for address ``arg1``.

    :param socket: open connection to the node
    :param arg1: address to query
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "addlist", 10)
    connections.send(socket, arg1, 10)
    address_tx_list = connections.receive(socket, 10)
    print("All transactions for requested address:")
    for row in address_tx_list:
        print(row)
def listlim(socket, arg1):  # get latest txs, limited by arg1
    """Print the node's most recent transactions (range/limit ``arg1``).

    :param socket: open connection to the node
    :param arg1: range limit understood by the node's "listlim" command
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "listlim", 10)
    connections.send(socket, arg1, 10)
    tx_list = connections.receive(socket, 10)
    print("All transactions for requested range:")
    for row in tx_list:
        print(row)
def aliasesget(socket, arg1):
    """Resolve aliases for a comma-separated list of addresses.

    :param socket: open connection to the node
    :param arg1: addresses joined by commas, e.g. "addr1,addr2"
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    arg_split = arg1.split(",")
    print(arg_split)
    connections.send(socket, "aliasesget", 10)
    connections.send(socket, arg_split, 10)
    alias_results = connections.receive(socket, 10)
    print(alias_results)
def difflast(socket):  # ask for last difficulty
    """Print the height and difficulty of the latest block.

    :param socket: open connection to the node
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``;
    # locals also no longer shadow the sibling functions blocklast/difflast.
    connections.send(socket, "difflast", 10)
    response = connections.receive(socket, 10)
    last_block = response[0]
    last_difficulty = response[1]
    print("Last block: {}".format(last_block))
    print("Last difficulty: {}".format(last_difficulty))
def blockget(socket, arg1):  # get block
    """Fetch a block by height and print it with summary info.

    :param socket: open connection to the node
    :param arg1: block height to request
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "blockget", 10)
    connections.send(socket, arg1, 10)
    block_get = connections.receive(socket, 10)
    print("Requested block: {}".format(block_get))
    print("Requested block number of transactions: {}".format(len(block_get)))
    print("Requested block height: {}".format(block_get[0][0]))
def keygen(socket):  # generate address
    """Ask the node to generate a keypair and print it.

    SECURITY: the PRIVATE KEY is generated by and RECEIVED FROM the
    remote node over this connection — only use against a trusted node.

    :param socket: open connection to the node
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "keygen", 10)
    keys_generated = connections.receive(socket, 10)
    print("Private key: {}".format(keys_generated[0]))
    print("Public key: {}".format(keys_generated[1]))
    print("Address: {}".format(keys_generated[2]))
def balanceget(socket, arg1):  # get balance
    """Print balance, credit, debit, fees and rewards for an address.

    :param socket: open connection to the node
    :param arg1: address to query
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "balanceget", 10)
    connections.send(socket, arg1, 10)
    balance_ledger = connections.receive(socket, 10)
    # node replies (balance, credit, debit, fees, rewards) in this order
    for label, value in zip(("balance", "credit", "debit", "fees", "rewards"), balance_ledger):
        print("Address {}: {}".format(label, value))
def alias_register(alias_desired):
    # Try to register ``alias_desired`` for this wallet: ask the node
    # whether the name is still free, and if so submit a 0-amount
    # transaction to ourselves carrying "alias=<name>" in the openfield.
    connections.send(s, "aliascheck", 10)
    connections.send(s, alias_desired, 10)
    result = connections.receive(s, 10)
    if result == "Alias free":
        # 0 amount, keep=1, openfield carries the alias registration
        send("0", myaddress, "1", "alias="+alias_desired)
        pass
    else:
        # Already taken: show a dismissable Tk popup instead
        top9 = Toplevel()
        top9.title("Name already registered")
        registered_label = Label(top9, text="Name already registered")
        registered_label.grid(row=0, column=0, sticky=N + W, padx=15, pady=(5, 0))
        dismiss = Button(top9, text="Dismiss", command=top9.destroy)
        dismiss.grid(row=3, column=0, sticky=W + E, padx=15, pady=(5, 5))
def aliases_list():
    # Show every alias registered to this wallet (``myaddress``) in a
    # Tk popup, one name per line.
    top12 = Toplevel()
    top12.title("Your aliases")
    aliases_box = Text(top12, width=100)
    aliases_box.grid(row=0, pady=0)
    connections.send(s, "aliasget", 10)
    connections.send(s, myaddress, 10)
    aliases_self = connections.receive(s, 10)
    for x in aliases_self:
        # NOTE(review): lstrip("alias=") strips a character set, not the
        # literal prefix — names starting with a/l/i/s/e/= get truncated.
        aliases_box.insert(INSERT, x[0].lstrip("alias="))
        aliases_box.insert(INSERT,"\n")
    close = Button(top12, text="Close", command=top12.destroy)
    close.grid(row=3, column=0, sticky=W + E, padx=15, pady=(5, 5))
def txsend(socket, arg1, arg2, arg3, arg4, arg5):  # generate transaction
    """Ask the node to assemble, sign and broadcast a transaction.

    SECURITY: ``arg1`` is the PRIVATE KEY and is SENT TO THE NODE —
    only use against a trusted node.

    :param socket: open connection to the node
    :param arg1: private key (node derives pubkey + address from it)
    :param arg2: recipient address
    :param arg3: amount
    :param arg4: keep flag
    :param arg5: openfield data
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "txsend", 10)
    remote_tx_timestamp = '%.2f' % time.time()
    connections.send(socket, (str(remote_tx_timestamp), str(arg1), str(arg2), str(arg3), str(arg4), str(arg5)), 10)
    # node returns the signature of the generated transaction
    signature = connections.receive(socket, 10)
    print(signature)
def statusget(socket):
    """Query "statusget" and print the node's status fields.

    :param socket: open connection to the node
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "statusget", 10)
    response = connections.receive(socket, 10)
    # reply layout: address, peer count, peer list, threads, uptime,
    # consensus, consensus %, version
    (node_address, nodes_count, nodes_list, threads_count,
     uptime, consensus, consensus_percentage, version) = response[:8]
    print("Node address:", node_address)
    print("Number of nodes:", nodes_count)
    print("List of nodes:", nodes_list)
    print("Number of threads:", threads_count)
    print("Uptime:", uptime)
    print("Consensus:", consensus)
    print("Consensus percentage:", consensus_percentage)
    print("Version:", version)
def txsend(socket, arg1, arg2, arg3, arg4, arg5):  # generate transaction
    """Ask the node to assemble, sign and broadcast a transaction.

    SECURITY: ``arg1`` is the PRIVATE KEY and is SENT TO THE NODE —
    only use against a trusted node.

    :param socket: open connection to the node
    :param arg1: private key (node derives pubkey + address from it)
    :param arg2: recipient address
    :param arg3: amount
    :param arg4: keep flag
    :param arg5: openfield data
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "txsend", 10)
    timestamp = '%.2f' % time.time()
    privkey = str(arg1)  # FIX: was str(arg) — undefined name, NameError at runtime
    recipient = str(arg2)
    amount = str(arg3)
    keep = str(arg4)
    openfield = str(arg5)
    connections.send(socket, (timestamp, privkey, recipient, amount, keep, openfield), 10)
    # node returns the signature of the generated transaction
    signature = connections.receive(socket, 10)
    print(signature)
def api_gettransaction_for_recipients(self, socket_handler, db_handler, peers):
    """ Warning: this is currently very slow
    Returns the full transaction matching a tx id for a list of recipient addresses.
    Takes txid and format as params (json output if format is True)
    :param socket_handler:
    :param db_handler:
    :param peers:
    :return:
    """
    transaction = {}
    try:
        # get the txid
        transaction_id = connections.receive(socket_handler)
        # then the recipient list
        addresses = connections.receive(socket_handler)
        # and format (NOTE: shadows the builtin ``format``)
        format = connections.receive(socket_handler)
        # turn the JSON list into an SQL tuple literal, e.g. ["a","b"] -> ("a","b")
        recipients = json.dumps(addresses).replace("[", "(").replace(']', ')')  # format as sql
        # raw tx details — match by signature prefix (txid is a signature prefix)
        if self.config.old_sqlite:
            db_handler.execute_param(
                db_handler.h,
                "SELECT * FROM transactions WHERE recipient IN {} AND signature LIKE ?1"
                .format(recipients), (transaction_id + '%', ))
        else:
            # newer sqlite: extra substr() equality lets the index be used
            db_handler.execute_param(
                db_handler.h,
                "SELECT * FROM transactions WHERE recipient IN {} AND substr(signature,1,4)=substr(?1,1,4) and signature LIKE ?1"
                .format(recipients), (transaction_id + '%', ))
        raw = db_handler.h.fetchone()
        if not format:
            # raw (non-json) mode: ship the row as-is and stop here
            connections.send(socket_handler, raw)
            print('api_gettransaction_for_recipients', format, raw)
            return
        # current block height, needed for confirmations
        # db_handler.execute(db_handler.h, "SELECT MAX(block_height) FROM transactions")
        # NOTE(review): the MAX(block_height) query above is commented out, so
        # this fetchone() reads the *next row of the previous SELECT*, not the
        # chain height — 'confirmations' below looks wrong; confirm intent.
        block_height = db_handler.h.fetchone()[0]
        # map raw row columns into the named dict sent to the client
        transaction['txid'] = transaction_id
        transaction['time'] = raw[1]
        transaction['hash'] = raw[5]
        transaction['address'] = raw[2]
        transaction['recipient'] = raw[3]
        transaction['amount'] = raw[4]
        transaction['fee'] = raw[8]
        transaction['reward'] = raw[9]
        transaction['operation'] = raw[10]
        transaction['openfield'] = raw[11]
        try:
            transaction['pubkey'] = base64.b64decode(raw[6]).decode('utf-8')
        except:
            transaction['pubkey'] = raw[6]  # support new pubkey schemes
        transaction['blockhash'] = raw[7]
        transaction['blockheight'] = raw[0]
        transaction['confirmations'] = block_height - raw[0]
        # Get more info on the block the tx is in.
        db_handler.execute_param(
            db_handler.h,
            "SELECT timestamp, recipient FROM transactions WHERE block_height= ? AND reward > 0",
            (raw[0], ))
        block_data = db_handler.h.fetchone()
        transaction['blocktime'] = block_data[0]
        transaction['blockminer'] = block_data[1]
        print('api_gettransaction_for_recipients', format, transaction)
        connections.send(socket_handler, transaction)
    except Exception as e:
        # self.app_log.warning(e)
        raise
# Giveaway payout: sign the reward transaction, push it into a local
# node's mempool, and on success record the winning tweet and
# retweet/announce the payout on Twitter.
tx_submit = essentials.sign_rsa(timestamp, myaddress, recipient, amount, operation, openfield, key, public_key_hashed)
if tx_submit:
    s = socks.socksocket()
    s.settimeout(0.3)
    print(tx_submit)
    s.connect(("127.0.0.1", int(5658)))  # local node port
    print("Status: Connected to node")
    while True:
        # single-shot loop: submit once and break
        connections.send(s, "mpinsert", 10)
        connections.send(s, tx_submit, 10)
        reply = connections.receive(s, 10)
        print("Payout result: {}".format(reply))
        break
    if reply[-1] == "Success":
        # persist the processed tweet so it is not paid twice
        t.execute(
            "INSERT INTO tweets VALUES (?, ?, ?, ?, ?)",
            (row[0], row[1], row[2], tweet_qualified[1], name))
        twitter.commit()
        print("Tweet saved to database")
        api.retweet(tweet_id)
        api.update_status(
            "Bismuth address {} wins giveaway of {} $BIS for https://twitter.com/i/web/status/{}"
            .format(recipient, amount, tweet_id))
    else:
        print("Mempool insert failure")
def peersget(socket):
    """Print the peer list known to the node.

    :param socket: open connection to the node
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "peersget", 10)
    peers_received = connections.receive(socket, 10)
    print(peers_received)
def miner(privatekey_readable, public_key_hashed, address, miners, resultQueue):
    # OpenCL mining loop: feed candidate nonces from GPU kernels
    # (``miners``) through ``resultQueue``, check them against the
    # current difficulty, and on success assemble + sign a block and
    # submit it to the node (and/or pool).
    from Crypto.PublicKey import RSA
    Random.atfork()
    key = RSA.importKey(privatekey_readable)
    rndfile = Random.new()
    tries = 0
    firstrun = True
    begin = time.time()
    if pool_conf == 1:
        #do not use pools public key to sign, signature will be invalid
        self_address = address
        address = pool_address
        #ask for diff percentage
        s_pool = socks.socksocket()
        s_pool.settimeout(0.3)
        if tor_conf == 1:
            s_pool.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
        s_pool.connect((pool_ip_conf, 8525))  # connect to pool
        print("Connected")
        print(
            "Miner: Asking pool for share qualification difficulty requirement"
        )
        connections.send(s_pool, "diffp", 10)
        pool_diff_percentage = int(connections.receive(s_pool, 10))
        print(
            "Miner: Received pool for share qualification difficulty requirement: {}%"
            .format(pool_diff_percentage))
        s_pool.close()
        #ask for diff percentage
    q = 0
    # OpenCL Hash parameters
    for m in miners:
        # all kernels hash <address + nonce + block hash>; address is the fixed header
        m.setHeader(address.encode('utf-8'))
    db_block_hash = ""
    diff = 0
    old_hashes = []
    # OpenCL Hash parameters
    start = time.time()
    while True:
        try:
            # OpenCL Hash parameters
            old_diff = diff
            old_db = db_block_hash
            # OpenCL Hash parameters
            # calculate new hash
            nonces = 0
            # calculate difficulty
            s_node = socks.socksocket()
            if tor_conf == 1:
                s_node.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
            s_node.connect((node_ip_conf, int(port)))  # connect to local node
            connections.send(s_node, "blocklast", 10)
            blocklast = connections.receive(s_node, 10)
            db_block_hash = blocklast[7]
            connections.send(s_node, "diffget", 10)
            diff = connections.receive(s_node, 10)
            s_node.close()
            diff = int(diff[1])
            diff_real = int(diff)
            if pool_conf == 0:
                diff = int(diff)
            else:  # if pooled
                # shares only need a percentage of the real difficulty
                diff_pool = diff_real
                diff = percentage(pool_diff_percentage, diff_real)
                if diff > diff_pool:
                    diff = diff_pool
            mining_condition = bin_convert(db_block_hash)[0:diff]
            # block_hash = hashlib.sha224(str(block_send) + db_block_hash).hexdigest()
            # OpenCL Hash parameters
            if old_db == db_block_hash:
                if old_diff > diff:
                    # same block but difficulty dropped: earlier candidates
                    # may qualify now, so re-queue them
                    for hash in old_hashes:
                        resultQueue.pushCandidate([hash, 0])
            else:
                # new block: previous candidates are stale
                old_hashes = []
            if opencl_full_check == 1:
                searchKey = np.uint32(int(diff))
                print("Difficulty: {}".format(searchKey))
            else:
                searchKey = np.uint32(int(db_block_hash[:8], 16))
                print("Search key: {:x}".format(searchKey))
            for m in miners:
                m.setTail(db_block_hash.encode('utf-8'))
                m.setKernelParams(searchKey)
                m.startMining()
            elapsed = 0
            # OpenCL Hash parameters
            while True:
                # block until a kernel reports a candidate or we time out
                cand, onlist = resultQueue.getNextCandidate(opencl_timeout - elapsed)
                end = time.time()
                if cand is None:
                    # timeout: go refresh block hash / difficulty
                    print("Thread{} {} @ Update blockchain (timeout {} sec)".
                          format(q, db_block_hash[:10], opencl_timeout))
                    break
                candidate = cand[0]
                old_hashes.append(candidate)
                q = cand[1]
                elapsed += (end - start)
                print(
                    "Thread{} {} @ {:,.4f} sec to find a candidate ({} waiting to process)"
                    .format(q, db_block_hash[:10], end - start, onlist))
                start = time.time()
                nonce = candidate.tobytes('C').hex()
                #np.set_printoptions(formatter={'int':hex})
                #print( "(python) Nonce: {}".format( nonce ) )
                #seeder = db_block_hash.encode("utf-8")
                #print( "(python) Seeder: {}".format( seeder ) )
                #debug_hash = hashlib.sha224((address + nonce + db_block_hash).encode("utf-8")).hexdigest()
                #print( "(python) Hash : {}".format( debug_hash ) )
                #print( "(python) mining_condition: {}".format( db_block_hash[0:int(diff/8)+1] ) )
                # CPU-side recheck of the GPU candidate
                mining_hash = bin_convert(
                    hashlib.sha224(
                        (address + nonce + db_block_hash).encode("utf-8")).hexdigest())
                tries = tries + 1
                if mining_condition in mining_hash:
                    tries = 0
                    print("Thread {} found a good block hash in {} cycles".
                          format(q, tries))
                    # serialize txs
                    block_send = []
                    del block_send[:]  # empty
                    removal_signature = []
                    del removal_signature[:]  # empty
                    s_node = socks.socksocket()
                    if tor_conf == 1:
                        s_node.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                    s_node.connect((node_ip_conf, int(port)))  # connect to config.txt node
                    connections.send(s_node, "mpget", 10)
                    data = connections.receive(s_node, 10)
                    s_node.close()
                    if data != "[]":
                        # include pending mempool transactions in the block
                        mempool = data
                        for mpdata in mempool:
                            transaction = (str(mpdata[0]), str(mpdata[1][:56]),
                                           str(mpdata[2][:56]), '%.8f' % float(mpdata[3]),
                                           str(mpdata[4]), str(mpdata[5]),
                                           str(mpdata[6]), str(mpdata[7])
                                           )  # create tuple
                            # print transaction
                            block_send.append(
                                transaction
                            )  # append tuple to list for each run
                            removal_signature.append(
                                str(mpdata[4]))  # for removal after successful mining
                    # claim reward
                    block_timestamp = '%.2f' % time.time()
                    transaction_reward = (str(block_timestamp), str(address[:56]),
                                          str(address[:56]), '%.8f' % float(0),
                                          "0", str(nonce)
                                          )  # only this part is signed!
                    # print transaction_reward
                    h = SHA.new(str(transaction_reward).encode("utf-8"))
                    signer = PKCS1_v1_5.new(key)
                    signature = signer.sign(h)
                    signature_enc = base64.b64encode(signature)
                    if signer.verify(h, signature) == True:
                        print("Signature valid")
                        block_send.append(
                            (str(block_timestamp), str(address[:56]),
                             str(address[:56]), '%.8f' % float(0),
                             str(signature_enc.decode("utf-8")),
                             str(public_key_hashed), "0",
                             str(nonce)))  # mining reward tx
                        print("Block to send: {}".format(block_send))
                        if not any(
                                isinstance(el, list) for el in block_send
                        ):  # if it's not a list of lists (only the mining tx and no others)
                            new_list = []
                            new_list.append(block_send)
                            block_send = new_list  # make it a list of lists
                        # claim reward
                        # include data
                        tries = 0
                        # submit mined block to node
                        if sync_conf == 1:
                            check_uptodate(300)
                        if pool_conf == 1:
                            # re-check against the REAL network difficulty
                            mining_condition = bin_convert(db_block_hash)[0:diff_real]
                            if mining_condition in mining_hash:
                                print(
                                    "Miner: Submitting block to all nodes, because it satisfies real difficulty too"
                                )
                                threading.Thread(target=nodes_block_submit,
                                                 args=(block_send, )).start()
                            try:
                                s_pool = socks.socksocket()
                                s_pool.settimeout(0.3)
                                if tor_conf == 1:
                                    s_pool.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                                s_pool.connect(
                                    (pool_ip_conf, 8525))  # connect to pool
                                print("Connected")
                                print(
                                    "Miner: Proceeding to submit mined block to pool"
                                )
                                connections.send(s_pool, "block", 10)
                                connections.send(s_pool, self_address, 10)
                                connections.send(s_pool, block_send, 10)
                                s_pool.close()
                                print("Miner: Block submitted to pool")
                            except Exception as e:
                                print("Miner: Could not submit block to pool")
                                pass
                        if pool_conf == 0:
                            threading.Thread(target=nodes_block_submit,
                                             args=(block_send, )).start()
                    else:
                        print("Invalid signature")
                    tries = 0
        except Exception as e:
            print(e)
            time.sleep(0.1)
            if debug_conf == 1:
                raise
            else:
                pass
def table(address, addlist_20):  # transaction table
    # Build the wallet's transaction table: a flat ``datasheet`` list of
    # 5 cells per row (Time, From, To, Amount, Type) rendered into the
    # ``f4`` Tk frame as read-only Entry widgets.
    # data
    datasheet = ["Time", "From", "To", "Amount", "Type"]
    # show mempool txs
    connections.send(s, "mpget", 10)  # senders
    mempool_total = connections.receive(s, 10)
    print (mempool_total)
    for tx in mempool_total:
        if tx[1] == address:
            datasheet.append("Unconfirmed")
            datasheet.append(tx[1])
            datasheet.append(tx[2])
            datasheet.append(tx[3])
            datasheet.append("Transaction")
    # show mempool txs
    # retrieve aliases in bulk
    addlist_addressess = []
    reclist_addressess = []
    for x in addlist_20:
        addlist_addressess.append(x[2])  #append address
        reclist_addressess.append(x[3])  #append recipient
    #print(addlist_addressess)
    # define row color
    colors = []
    for x in addlist_20:
        if x[3] == address:
            colors.append("green4")  # incoming
        else:
            colors.append("indianred")  # outgoing
    # define row color
    if resolve_var.get() == 1:
        connections.send(s, "aliasesget", 10)  #senders
        connections.send(s, addlist_addressess, 10)
        aliases_address_results = connections.receive(s, 10)
        connections.send(s, "aliasesget", 10)  #recipients
        connections.send(s, reclist_addressess, 10)
        aliases_rec_results = connections.receive(s, 10)
    # retrieve aliases in bulk
    i = 0
    for row in addlist_20:
        db_timestamp = row[1]
        datasheet.append(datetime.fromtimestamp(float(db_timestamp)).strftime('%Y-%m-%d %H:%M:%S'))
        # NOTE(review): lstrip("alias=") strips a character set, not the
        # literal prefix — aliases starting with a/l/i/s/e/= get truncated.
        if resolve_var.get() == 1:
            db_address = aliases_address_results[i].lstrip("alias=")
        else:
            db_address = row[2]
        datasheet.append(db_address)
        if resolve_var.get() == 1:
            db_recipient = aliases_rec_results[i].lstrip("alias=")
        else:
            db_recipient = row[3]
        datasheet.append(db_recipient)
        db_amount = row[4]
        db_reward = row[9]
        db_openfield = row[11]
        datasheet.append('%.8f' % (float(db_amount) + float(db_reward)))
        # classify the row by reward / openfield prefix
        if float(db_reward) > 0:
            symbol = "Mined"
        elif db_openfield.startswith("bmsg"):
            symbol = "b64 Message"
        elif db_openfield.startswith("msg"):
            symbol = "Message"
        else:
            symbol = "Transaction"
        datasheet.append(symbol)
        i = i+1
    # data
    app_log.warning(datasheet)
    app_log.warning(len(datasheet))
    if len(datasheet) == 5:
        # only the header row
        app_log.warning("Looks like a new address")
    elif len(datasheet) < 20 * 5:
        app_log.warning(len(datasheet))
        table_limit = len(datasheet) / 5
    else:
        table_limit = 20  # cap the view at 20 rows
    if len(datasheet) > 5:
        k = 0
        for child in f4.winfo_children():  # prevent hangup
            child.destroy()
        for i in range(int(table_limit)):
            for j in range(5):
                # the 5 cells of the current row, used to detect header /
                # unconfirmed rows regardless of column position
                datasheet_compare = [datasheet[k], datasheet[k - 1], datasheet[k - 2], datasheet[k - 3], datasheet[k - 4]]
                if "Time" in datasheet_compare:  #header
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='linen')
                elif j == 0:  #first row
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='linen')
                elif "Unconfirmed" in datasheet_compare:  # unconfirmed txs
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='linen')
                elif j == 3:  #sent
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground=colors[i-1])
                elif j == 4:  #last row
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='bisque')
                else:
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='bisque')
                e.grid(row=i + 1, column=j, sticky=EW)
                e.insert(END, datasheet[k])
                e.configure(state="readonly")
                k = k + 1
def statusget(socket):
    """Print node status as key/value pairs (JSON "statusjson" variant).

    :param socket: open connection to the node
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "statusjson", 10)
    response = connections.receive(socket, 10)
    for key in response:
        print(key, ":", response[key])
def aliasget(socket, arg1):
    """Print the alias(es) registered for address ``arg1``.

    :param socket: open connection to the node
    :param arg1: address to look up
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "aliasget", 10)
    connections.send(socket, arg1, 10)
    alias_results = connections.receive(socket, 10)
    print(alias_results)
def diffget(s):
    """Query the node for the current difficulty and return it as a float.

    :param s: open connection to the node
    :return: current mining difficulty
    """
    connections.send(s, "diffget", 10)
    response = connections.receive(s, 10)
    # the difficulty value lives in the second field of the reply
    return float(response[1])
def mpinsert(s, transaction):
    """Push ``transaction`` into the node's mempool and print the verdict.

    :param s: open connection to the node
    :param transaction: transaction tuple to insert
    """
    for payload in ("mpinsert", transaction):
        connections.send(s, payload, 10)
    print (connections.receive(s, 10))
def mpget(socket):  # ask for mempool
    """Fetch and print the node's current mempool contents.

    :param socket: open connection to the node
    """
    # FIX: the original ignored the ``socket`` argument and used a global ``s``.
    connections.send(socket, "mpget", 10)
    mempool = connections.receive(socket, 10)
    print ("Current mempool: {}".format(mempool))
if full_ledger == 1: conn = sqlite3.connect(ledger_path) else: conn = sqlite3.connect(hyper_path) conn.text_factory = str c = conn.cursor() s = socks.socksocket() s.settimeout(10) #s.connect(("bismuth.live", 5658)) s.connect(("127.0.0.1", 5658)) connections.send(s, "balanceget", 10) connections.send(s, address, 10) # change address here to view other people's transactions stats_account = connections.receive(s, 10) balance = stats_account[0] #credit = stats_account[1] #debit = stats_account[2] #fees = stats_account[3] #rewards = stats_account[4] print("Transction address: %s" % address) print("Transction address balance: %s" % balance) # get balance def address_validate(address): if re.match('[abcdef0123456789]{56}', address): return True else:
def refresh():
    # Refresh the wallet UI: balance, last block, fee estimate,
    # difficulty and sync status. Prefers a live node connection and
    # falls back to the local ledger DB (cursors ``c`` / mempool ``m``)
    # when the node is unreachable.
    global balance
    # print "refresh triggered"
    try:
        # --- light mode: ask the node directly ---
        s = socks.socksocket()
        s.connect((node_ip_conf, int(port)))
        connections.send(s, "balanceget", 10)
        connections.send(
            s, address,
            10)  # change address here to view other people's transactions
        stats_account = connections.receive(s, 10)
        balance = stats_account[0]
        credit = stats_account[1]
        debit = stats_account[2]
        fees = stats_account[3]
        rewards = stats_account[4]
        app_log.warning("Transaction address balance: {}".format(balance))
        connections.send(s, "blocklast", 10)
        block_get = connections.receive(s, 10)
        bl_height = block_get[0]
        db_timestamp_last = block_get[1]
        s.close()
    except:  # get locally
        # --- fallback: recompute everything from the local databases ---
        app_log.warning(
            "Unable to start in light mode, using local db for balance calculation"
        )
        # global balance
        # print "refresh triggered"
        # pending spend estimate from the mempool db (amount + fees)
        m.execute(
            "SELECT count(amount), sum(amount) FROM transactions WHERE address = ?;",
            (address, ))
        result = m.fetchall()[0]
        if result[1] != None:
            debit_mempool = float(
                result[1]) + float(result[1]) * 0.001 + int(result[0]) * 0.01
        else:
            debit_mempool = 0
        c.execute("SELECT sum(amount) FROM transactions WHERE recipient = ?;",
                  (address, ))
        credit = c.fetchone()[0]
        c.execute("SELECT sum(amount) FROM transactions WHERE address = ?;",
                  (address, ))
        debit = c.fetchone()[0]
        c.execute("SELECT sum(fee) FROM transactions WHERE address = ?;",
                  (address, ))
        fees = c.fetchone()[0]
        c.execute("SELECT sum(reward) FROM transactions WHERE address = ?;",
                  (address, ))
        rewards = c.fetchone()[0]
        c.execute("SELECT MAX(block_height) FROM transactions")
        bl_height = c.fetchone()[0]
        # SUM() returns NULL (None) on empty sets — normalise to 0
        debit = 0 if debit is None else float('%.8f' % debit)
        fees = 0 if fees is None else float('%.8f' % fees)
        rewards = 0 if rewards is None else float('%.8f' % rewards)
        credit = 0 if credit is None else float('%.8f' % credit)
        balance = '%.8f' % (credit - debit - fees + rewards - debit_mempool)
        app_log.warning("Node: Transction address balance: {}".format(balance))
        # calculate diff
        c.execute(
            "SELECT * FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;"
        )  # or it takes the first
        result = c.fetchall()
        db_timestamp_last = float(result[0][1])
        # print db_timestamp_last
        db_block_height = result[0][0]
    # print timestamp_avg
    try:
        # fee preview for the openfield text currently typed in the UI
        if encode_var.get() == 1:
            openfield_input = base64.b64encode(
                str(openfield.get("1.0", END).strip()))
        else:
            openfield_input = str(openfield.get("1.0", END)).strip()
        fee = '%.8f' % float(0.01 + (float(len(openfield_input)) / 100000) +
                             int(keep_var.get()))  # 0.01 dust
        app_log.warning("Fee: {}".format(fee))
    except Exception as e:
        fee = 0.01
        app_log.warning("Fee error: {}".format(e))
    # calculate fee
    # check difficulty
    try:
        s = socks.socksocket()
        s.connect((node_ip_conf, int(port)))
        connections.send(s, "diffget", 10)
        diff = connections.receive(s, 10)
        s.close()
    except:  # get locally
        app_log.warning(
            "Unable to start in light mode, using local db for difficulty calculation"
        )
        diff = difficulty(c)
    # check difficulty
    diff_msg = diff[1]
    # network status
    time_now = str(time.time())
    last_block_ago = float(time_now) - float(db_timestamp_last)
    if last_block_ago > 300:
        sync_msg = "{}m behind".format((int(last_block_ago / 60)))
        sync_msg_label.config(fg='red')
    else:
        sync_msg = "Up to date\nLast block: {}s ago".format(
            (int(last_block_ago)))
        sync_msg_label.config(fg='green')
    # network status
    # aliases
    # c.execute("SELECT openfield FROM transactions WHERE address = ? AND openfield LIKE ?;",(address,)+("alias="+'%',))
    # aliases = c.fetchall()
    # app_log.warning("Aliases: "+str(aliases))
    # aliases
    # fees_current_var.set("Current Fee: {}".format('%.8f' % float(fee)))
    # push everything into the Tk StringVars driving the UI labels
    balance_var.set("Balance: {}".format('%.8f' % float(balance)))
    debit_var.set("Spent Total: {}".format('%.8f' % float(debit)))
    credit_var.set("Received Total: {}".format('%.8f' % float(credit)))
    fees_var.set("Fees Paid: {}".format('%.8f' % float(fees)))
    rewards_var.set("Rewards: {}".format('%.8f' % float(rewards)))
    bl_height_var.set("Block Height: {}".format(bl_height))
    diff_msg_var.set("Mining Difficulty: {}".format('%.2f' % float(diff_msg)))
    sync_msg_var.set("Network: {}".format(sync_msg))
    table()
def miner(q, privatekey_readable, public_key_hashed, address):
    """CPU mining loop for thread ``q``.

    Repeatedly fetches the latest block hash and difficulty from the
    node, brute-forces random nonces until one satisfies the difficulty
    condition, then assembles the block (mempool txs + signed mining
    reward tx) and submits it to the node and/or the pool.

    FIX: the collapsed source text contained an orphaned
    ``else:  # if pooled`` (its ``if pool_conf == 0:`` guard was lost)
    and the outer ``try:`` had no ``except`` clause; both are restored
    here to match the parallel OpenCL miner in this file.
    """
    from Crypto.PublicKey import RSA
    Random.atfork()
    key = RSA.importKey(privatekey_readable)
    rndfile = Random.new()
    tries = 0
    firstrun = True
    begin = time.time()
    if pool_conf == 1:
        #do not use pools public key to sign, signature will be invalid
        self_address = address
        address = pool_address
        #ask for diff percentage
        s_pool = socks.socksocket()
        s_pool.settimeout(0.3)
        if tor_conf == 1:
            s_pool.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
        s_pool.connect((pool_ip_conf, 8525))  # connect to pool
        print("Connected")
        print("Miner: Asking pool for share qualification difficulty requirement")
        connections.send(s_pool, "diffp", 10)
        pool_diff_percentage = int(connections.receive(s_pool, 10))
        print("Miner: Received pool for share qualification difficulty requirement: {}%".format(pool_diff_percentage))
        s_pool.close()
        #ask for diff percentage
    while True:
        try:
            # calculate new hash
            nonces = 0
            # calculate difficulty
            s_node = socks.socksocket()
            if tor_conf == 1:
                s_node.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
            s_node.connect((node_ip_conf, port))  # connect to local node
            connections.send(s_node, "blocklast", 10)
            blocklast = connections.receive(s_node, 10)
            db_block_hash = blocklast[7]
            connections.send(s_node, "diffget", 10)
            diff = connections.receive(s_node, 10)
            s_node.close()
            diff = int(diff[1])
            diff_real = diff
            if pool_conf == 0:  # FIX: restored guard (source had a dangling else)
                diff = int(diff)
            else:  # if pooled
                # shares only need a percentage of the real difficulty
                diff_pool = diff_real
                diff = percentage(pool_diff_percentage, diff_real)
                if diff > diff_pool:
                    diff = diff_pool
            mining_condition = bin_convert(db_block_hash)[0:diff]
            # block_hash = hashlib.sha224(str(block_send) + db_block_hash).hexdigest()
            while tries < diff_recalc_conf:
                start = time.time()
                nonce = hashlib.sha224(rndfile.read(16)).hexdigest()[:32]
                mining_hash = bin_convert(hashlib.sha224((address + nonce + db_block_hash).encode("utf-8")).hexdigest())
                end = time.time()
                if tries % 2500 == 0:  #limit output
                    try:
                        cycles_per_second = 1/(end - start)
                        print("Thread{} {} @ {:.2f} cycles/second, difficulty: {}({}), iteration: {}".format(q, db_block_hash[:10], cycles_per_second, diff, diff_real, tries))
                    except:
                        pass
                tries += 1
                if mining_condition in mining_hash:
                    tries = 0
                    print("Thread {} found a good block hash in {} cycles".format(q, tries))
                    # serialize txs
                    block_send = []
                    del block_send[:]  # empty
                    removal_signature = []
                    del removal_signature[:]  # empty
                    s_node = socks.socksocket()
                    if tor_conf == 1:
                        s_node.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                    s_node.connect((node_ip_conf, port))  # connect to config.txt node
                    connections.send(s_node, "mpget", 10)
                    data = connections.receive(s_node, 10)
                    s_node.close()
                    if data != "[]":
                        # include pending mempool transactions in the block
                        mempool = data
                        for mpdata in mempool:
                            transaction = (
                                str(mpdata[0]), str(mpdata[1][:56]), str(mpdata[2][:56]), '%.8f' % float(mpdata[3]),
                                str(mpdata[4]), str(mpdata[5]), str(mpdata[6]), str(mpdata[7]))  # create tuple
                            # print transaction
                            block_send.append(transaction)  # append tuple to list for each run
                            removal_signature.append(str(mpdata[4]))  # for removal after successful mining
                    # claim reward
                    block_timestamp = '%.2f' % time.time()
                    transaction_reward = (str(block_timestamp), str(address[:56]), str(address[:56]),
                                          '%.8f' % float(0), "0", str(nonce))  # only this part is signed!
                    # print transaction_reward
                    h = SHA.new(str(transaction_reward).encode("utf-8"))
                    signer = PKCS1_v1_5.new(key)
                    signature = signer.sign(h)
                    signature_enc = base64.b64encode(signature)
                    if signer.verify(h, signature):
                        print("Signature valid")
                        block_send.append((str(block_timestamp), str(address[:56]), str(address[:56]),
                                           '%.8f' % float(0), str(signature_enc.decode("utf-8")),
                                           str(public_key_hashed), "0", str(nonce)))  # mining reward tx
                        print("Block to send: {}".format(block_send))
                        if not any(isinstance(el, list) for el in block_send):  # if it's not a list of lists (only the mining tx and no others)
                            new_list = []
                            new_list.append(block_send)
                            block_send = new_list  # make it a list of lists
                        # claim reward
                        # include data
                        tries = 0
                        # submit mined block to node
                        if sync_conf == 1:
                            check_uptodate(300)
                        if pool_conf == 1:
                            # re-check against the REAL network difficulty
                            mining_condition = bin_convert(db_block_hash)[0:diff_real]
                            if mining_condition in mining_hash:
                                print("Miner: Submitting block to all nodes, because it satisfies real difficulty too")
                                nodes_block_submit(block_send)
                            try:
                                s_pool = socks.socksocket()
                                s_pool.settimeout(0.3)
                                if tor_conf == 1:
                                    s_pool.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                                s_pool.connect((pool_ip_conf, 8525))  # connect to pool
                                print("Connected")
                                print("Miner: Proceeding to submit mined block to pool")
                                connections.send(s_pool, "block", 10)
                                connections.send(s_pool, self_address, 10)
                                connections.send(s_pool, block_send, 10)
                                print("Miner: Block submitted to pool")
                            except Exception as e:
                                print("Miner: Could not submit block to pool")
                                pass
                            finally:
                                s_pool.close()
                        if pool_conf == 0:
                            nodes_block_submit(block_send)
                    else:
                        print("Invalid signature")
                    tries = 0
        except Exception as e:
            # FIX: restored except clause (missing in the corrupted source);
            # mirrors the OpenCL miner's error handling in this file.
            print(e)
            time.sleep(0.1)
            if debug_conf == 1:
                raise
            else:
                pass
def diffgetjson(socket):
    """Ask the node for the difficulty details (JSON form) and print each field.

    Args:
        socket: an open, connected node socket.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "diffgetjson", 10)
    response = connections.receive(socket, 10)
    # response is a mapping of difficulty fields -> values; print one per line
    for key in response:
        print(key, ":", response[key])
def addfromalias(socket, arg1):
    """Resolve an alias to its address via the node and print the result.

    Args:
        socket: an open, connected node socket.
        arg1: the alias to resolve.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "addfromalias", 10)
    connections.send(socket, arg1, 10)
    address_fetch = connections.receive(socket, 10)
    print(address_fetch)
def tokensget(socket, arg1):
    """Query the node for the token balances of an address and print them.

    Args:
        socket: an open, connected node socket.
        arg1: the address to query.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "tokensget", 10)
    connections.send(socket, arg1, 10)
    tokens_results = connections.receive(socket, 10)
    print(tokens_results)
def peersget(socket):
    """Fetch the node's known peer list and print it.

    Args:
        socket: an open, connected node socket.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "peersget", 10)
    peers_received = connections.receive(socket, 10)
    print(peers_received)
def handle(self):
    """Pool request handler: serve miners the share difficulty and accept
    mined blocks, forwarding qualifying blocks to nodes and crediting shares.

    Protocol (one command per connection):
      * "diffp"  -> reply with the pool's share qualification percentage.
      * "block"  -> receive miner address + block, verify against real and
                    pool difficulty, broadcast if good, then record the share.
    """
    peer_ip = self.request.getpeername()[0]
    data = connections.receive(self.request, 10)
    app_log.warning("Received: {} from {}".format(data, peer_ip))  # will add custom ports later
    if data == 'diffp':
        app_log.warning("Sending the share qualification difficulty requirement: {}%".format(diff_percent_number))
        connections.send(self.request, diff_percent_number, 10)
    if data == "block":  # from miner to node
        # sock: open a connection to the configured local node (via Tor if enabled)
        s1 = socks.socksocket()
        if tor_conf == 1:
            s1.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
        s1.connect((node_ip_conf, int(port)))  # connect to local node,
        # sock
        # receive block
        miner_address = connections.receive(self.request, 10)
        app_log.warning("Received a block from miner {} ({})".format(peer_ip,miner_address))
        block_send = connections.receive(self.request, 10)
        # the nonce sits in field 7 of the last (mining reward) transaction
        nonce = (block_send[-1][7])
        app_log.warning("Combined mined segments: {}".format(block_send))
        #print(nonce)
        #print(block_send)
        #print(miner_address)
        # check difficulty
        app_log.warning("Asking node for difficulty")
        diff = int(diffget(s1))
        app_log.warning("Calculated difficulty: {}".format(diff))
        # check difficulty
        app_log.warning("Asking node for last block")
        # get last block
        connections.send(s1, "blocklast", 10)
        blocklast = connections.receive(s1, 10)
        db_block_hash = blocklast[7]
        # get last block
        app_log.warning("Last Hash: {}".format(db_block_hash))
        # recompute the miner's hash locally; the PoW test is a binary
        # prefix-of-last-hash substring match against the candidate hash
        mining_hash = bin_convert(hashlib.sha224((address + nonce + db_block_hash).encode("utf-8")).hexdigest())
        mining_condition = bin_convert(db_block_hash)[0:diff]
        if mining_condition in mining_hash:
            # block satisfies the REAL network difficulty: broadcast to all peers
            app_log.warning("Difficulty requirement satisfied for mining")
            app_log.warning("Sending block to node {}".format(peer_ip))
            global peer_dict
            peer_dict = {}
            with open("peers.txt") as f:
                # peers.txt lines look like ('ip', 'port'); strip the decoration
                for line in f:
                    line = re.sub("[\)\(\:\\n\'\s]", "", line)
                    peer_dict[line.split(",")[0]] = line.split(",")[1]
                for k, v in peer_dict.items():
                    peer_ip = k
                    # app_log.info(HOST)
                    peer_port = int(v)
                    # app_log.info(PORT)
                    # connect to all nodes
                    try:
                        s = socks.socksocket()
                        s.settimeout(0.3)
                        if tor_conf == 1:
                            s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                        s.connect((peer_ip, int(peer_port)))  # connect to node in peerlist
                        app_log.warning("Connected")
                        app_log.warning("Pool: Proceeding to submit mined block")
                        connections.send(s, "block", 10)
                        connections.send(s, block_send, 10)
                        app_log.warning("Pool: Block submitted to {}".format(peer_ip))
                    except Exception as e:
                        # best-effort broadcast: a dead peer must not abort the loop
                        app_log.warning("Pool: Could not submit block to {} because {}".format(peer_ip, e))
                        pass
        # share accounting happens regardless of whether the block was broadcast
        diff_percentage = percentage(diff_percent_number, diff)
        app_log.warning("Pool: Current difficulty: Pool: {} Real: {}".format(diff_percentage,diff))
        # shares are credited at the easier of (pool %, real difficulty)
        if diff < diff_percentage:
            diff_shares = diff
        else:
            diff_shares = diff_percentage
        shares = sqlite3.connect('shares.db')
        shares.text_factory = str
        s = shares.cursor()
        # protect against used share resubmission
        execute_param(s, ("SELECT nonce FROM nonces WHERE nonce = ?"), (nonce,))
        try:
            # fetchone() returns None for an unseen nonce, so [0] raises and
            # we fall into the except branch, which is the fresh-share path
            result = s.fetchone()[0]
            app_log.warning("Miner trying to reuse a share, ignored")
        except:
            # protect against used share resubmission
            mining_condition = bin_convert(db_block_hash)[0:diff_shares]  # floor set by pool
            if mining_condition in mining_hash:
                app_log.warning("Difficulty requirement satisfied for saving shares")
                execute_param(s, ("INSERT INTO nonces VALUES (?)"), (nonce,))
                commit(shares)
                timestamp = '%.2f' % time.time()
                s.execute("INSERT INTO shares VALUES (?,?,?,?)", (str(miner_address), str(1), timestamp, "0"))
                shares.commit()
            else:
                app_log.warning("Difficulty requirement not satisfied for anything")
        s.close()
        s1.close()
def addvalidate(socket, arg1):
    """Ask the node whether an address is valid and print the verdict.

    Args:
        socket: an open, connected node socket.
        arg1: the address to validate.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "addvalidate", 10)
    connections.send(socket, arg1, 10)
    validate_result = connections.receive(socket, 10)
    print(validate_result)
def msg_dialogue(address):
    """Open the messaging popup: list transactions for this wallet and render
    decoded/decrypted messages received by and sent from `address`.

    Fixes over the original:
      * `str.lstrip(prefix)` strips a *character set*, not a prefix, so message
        bodies starting with any of the prefix characters were corrupted;
        replaced with exact-prefix slicing.
      * In the sent-messages branch, a base64 decode failure assigned
        `msg_received_digest` instead of `msg_sent_digest` (typo), leaving the
        raw payload displayed; now reports "Could not decode message".
      * The four-way duplicated decode/decrypt logic is factored into helpers.
    """
    connections.send(s, "addlist", 10)
    connections.send(s, myaddress, 10)
    addlist = connections.receive(s, 10)
    print(addlist)

    def _strip_prefix(text, prefix):
        # Exact-prefix removal; str.lstrip() would strip a character set instead.
        return text[len(prefix):] if text.startswith(prefix) else text

    def _decrypt_rsa_aes(payload):
        # payload is the str() of a (nonce, tag, ciphertext, enc_session_key)
        # tuple produced by the sender (see send_confirm); raises on any failure.
        (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(payload)
        private_key = RSA.import_key(open("privkey.der").read())
        # Decrypt the session key with our private RSA key
        cipher_rsa = PKCS1_OAEP.new(private_key)
        session_key = cipher_rsa.decrypt(enc_session_key)
        # Decrypt the data with the AES session key
        cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce)
        return cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8")

    def _decode_openfield(openfield):
        # Turn an openfield entry into displayable text, per its prefix.
        if openfield.startswith("enc=msg="):
            try:
                return _decrypt_rsa_aes(_strip_prefix(openfield, "enc=msg="))
            except Exception:
                return "Could not decrypt message"
        if openfield.startswith("enc=bmsg="):
            try:
                payload = base64.b64decode(_strip_prefix(openfield, "enc=bmsg=")).decode("utf-8")
                return _decrypt_rsa_aes(payload)
            except Exception:
                return "Could not decrypt message"
        if openfield.startswith("bmsg="):
            try:
                return base64.b64decode(_strip_prefix(openfield, "bmsg=")).decode("utf-8")
            except Exception:
                return "Could not decode message"
        if openfield.startswith("msg="):
            return _strip_prefix(openfield, "msg=")
        return openfield

    def msg_received_get(addlist):
        # Messages where we are the recipient (field 3).
        for x in addlist:
            if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[3] == address:
                connections.send(s, "aliasget", 10)
                connections.send(s, x[2], 10)
                msg_address = connections.receive(s, 10)[0][0]
                msg_received_digest = _decode_openfield(x[11])
                msg_received.insert(INSERT, ((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(float(x[1])))) + " From " + _strip_prefix(msg_address, "alias=") + ": " + msg_received_digest) + "\n")

    def msg_sent_get(addlist):
        # Messages where we are the sender (field 2).
        for x in addlist:
            if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[2] == address:
                connections.send(s, "aliasget", 10)
                connections.send(s, x[3], 10)
                received_aliases = connections.receive(s, 10)
                msg_recipient = received_aliases[0][0]
                msg_sent_digest = _decode_openfield(x[11])
                msg_sent.insert(INSERT, ((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(float(x[1])))) + " To " + _strip_prefix(msg_recipient, "alias=") + ": " + msg_sent_digest) + "\n")

    # popup
    top11 = Toplevel()
    top11.title("Messaging")
    Label(top11, text="Received:", width=20).grid(row=0)
    msg_received = Text(top11, width=100, height=20, font=("Tahoma", 8))
    msg_received.grid(row=1, column=0, sticky=W, padx=5, pady=(5, 5))
    msg_received_get(addlist)
    Label(top11, text="Sent:", width=20).grid(row=2)
    msg_sent = Text(top11, width=100, height=20, font=("Tahoma", 8))
    msg_sent.grid(row=3, column=0, sticky=W, padx=5, pady=(5, 5))
    msg_sent_get(addlist)
    dismiss = Button(top11, text="Dismiss", command=top11.destroy)
    dismiss.grid(row=5, column=0, sticky=W + E, padx=15, pady=(5, 5))
def diffget(socket):
    """Fetch the current difficulty from the node; print and return it.

    Args:
        socket: an open, connected node socket.

    Returns:
        The difficulty payload as received from the node.

    Fixes: the original ignored the ``socket`` parameter (used global ``s``)
    and returned nothing, although callers such as the pool's ``handle``
    use ``int(diffget(s1))``.
    """
    connections.send(socket, "diffget", 10)
    diff = connections.receive(socket, 10)
    print("Current difficulty: {}".format(diff))
    return diff
def aliasget(socket, arg1):
    """Query the node for the alias(es) registered to an address and print them.

    Args:
        socket: an open, connected node socket.
        arg1: the address whose aliases to fetch.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "aliasget", 10)
    connections.send(socket, arg1, 10)
    alias_results = connections.receive(socket, 10)
    print(alias_results)
def mpget(socket):
    """Ask the node for its current mempool and print it.

    Args:
        socket: an open, connected node socket.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "mpget", 10)
    mempool = connections.receive(socket, 10)
    print("Current mempool: {}".format(mempool))
def addvalidate(socket, arg1):
    """Ask the node whether an address is valid and print the verdict.

    NOTE(review): this re-defines ``addvalidate`` already declared earlier in
    the file; the later definition wins at import time — confirm which one is
    intended and remove the duplicate.

    Args:
        socket: an open, connected node socket.
        arg1: the address to validate.

    Fix: the original ignored the ``socket`` parameter and used the module-level
    global ``s`` instead; the connection passed by the caller is now used.
    """
    connections.send(socket, "addvalidate", 10)
    connections.send(socket, arg1, 10)
    validate_result = connections.receive(socket, 10)
    print(validate_result)
def get(self):
    """HTTP GET handler: query the local node's status, geolocate its peers,
    and render a Google Maps page with one clustered marker per peer IP.

    Fix: the original debug line ``print(response_web).encode("utf-8")`` called
    ``.encode`` on the ``None`` returned by ``print`` — it always raised
    AttributeError (silently swallowed), so the lookup was never printed.
    """
    # statusget from the local node; response[2] is the peer list
    s = socks.socksocket()
    s.settimeout(10)
    s.connect(("127.0.0.1", 5658))
    connections.send(s, "statusget", 10)
    response = connections.receive(s, 10)
    s.close()
    nodes_list = response[2]
    ips = nodes_list
    markers = []
    print("IPs:", ips)
    # NOTE(review): 'geo.json' is opened for writing but `f` is never written
    # to — presumably leftover from an earlier version; confirm and remove.
    with open('geo.json', 'w') as f:
        for ip in ips:
            getgeo = requests.request(
                "GET", "http://freegeoip.net/json/{}".format(ip))
            response_web = json.loads(getgeo.text)
            try:
                print(response_web)
            except:
                pass
            # emit one JS object literal per peer: {lat: .., lng: ..},
            markers.append("{{lat: {},".format(response_web["latitude"]))
            markers.append(" lng: {}}},\n".format(
                response_web["longitude"]))
    html = []
    html.append("<!DOCTYPE html>\n")
    html.append("<html>\n")
    html.append("<head>\n")
    html.append(
        "<meta name='viewport' content='initial-scale=1.0, user-scalable=no'>\n"
    )
    html.append("<meta charset='utf-8'>\n")
    html.append("<title>Bismuth Node Statistics</title>\n")
    html.append(
        '<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" >'
    )
    html.append("<style>\n")
    html.append(
        "/* Always set the map height explicitly to define the size of the div\n"
    )
    html.append("* element that contains the map. */\n")
    html.append("#map {\n")
    html.append("height: 100%;\n")
    html.append("}\n")
    html.append("/* Optional: Makes the sample page fill the window. */\n")
    html.append("html, body {\n")
    html.append("height: 100%;\n")
    html.append("margin: 0;\n")
    html.append("padding: 0;\n")
    html.append("}\n")
    html.append("</style>\n")
    html.append("</head>\n")
    html.append("<body>\n")
    html.append("<div id='map'></div>\n")
    html.append("<script>\n")
    html.append("\n")
    html.append("function initMap() {\n")
    html.append(
        "var map = new google.maps.Map(document.getElementById('map'), {\n"
    )
    html.append("zoom: 3,\n")
    html.append("center: {lat: -28.024, lng: 140.887}\n")
    html.append("});\n")
    html.append(
        "// Create an array of alphabetical characters used to label the markers.\n"
    )
    html.append("var labels = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';\n")
    html.append("// Add some markers to the map.\n")
    html.append(
        "// Note: The code uses the JavaScript Array.prototype.map() method to\n"
    )
    html.append(
        "// create an array of markers based on a given 'locations' array.\n"
    )
    html.append(
        "// The map() method here has nothing to do with the Google Maps API.\n"
    )
    html.append("var markers = locations.map(function(location, i) {\n")
    html.append("return new google.maps.Marker({\n")
    html.append("position: location,\n")
    html.append("label: labels[i % labels.length]\n")
    html.append("});\n")
    html.append("});\n")
    html.append("// Add a marker clusterer to manage the markers.\n")
    html.append("var markerCluster = new MarkerClusterer(map, markers,\n")
    html.append(
        "{imagePath: 'https://developers.google.com/maps/documentation/javascript/examples/markerclusterer/m'});\n"
    )
    html.append("}\n")
    html.append("var locations = [\n")
    html.append(''.join(markers))
    html.append("]\n")
    html.append("</script>\n")
    html.append(
        "<script src='https://developers.google.com/maps/documentation/javascript/examples/markerclusterer/markerclusterer.js'>\n"
    )
    html.append("</script>\n")
    html.append("<script async defer\n")
    html.append(
        "src='https://maps.googleapis.com/maps/api/js?key={}&callback=initMap'>"
        .format(api_key))
    html.append("</script>\n")
    """
    node_address = response[0]
    nodes_count = response[1]
    nodes_list = response[2]
    threads_count = response[3]
    uptime = response[4]
    consensus = response[5]
    consensus_percentage = response[6]
    version = response[7]
    html.append("<div class = 'col-md-8'>")
    html.append("Node address: {}<br>".format(node_address))
    html.append("Number of nodes: {}<br>".format(nodes_count))
    html.append("List of nodes: {}<br>".format(nodes_list))
    html.append("Number of threads: {}<br>".format(threads_count))
    html.append("Uptime: {}<br>".format(uptime))
    html.append("Consensus: {}<br>".format(consensus))
    html.append("Consensus percentage: {}<br>".format(consensus_percentage))
    html.append("Version: {}<br>".format(version))
    html.append("</div>")
    """
    html.append("</body>\n")
    html.append("</html>\n")
    self.write(''.join(html))
def send(amount_input, recipient_input, keep_input, openfield_input):
    """Validate, sign and submit a transaction to the node's mempool.

    Shows a popup and aborts early when the wallet is locked, the amount is not
    numeric, or the recipient address has the wrong length.

    Fixes over the original:
      * the "wallet locked" and "invalid amount" guards now return instead of
        falling through (the original continued and later crashed with
        NameError/ValueError inside the Tk callback);
      * bare ``except:`` narrowed to the specific exceptions;
      * removed the pointless ``while True: ... break`` around mpinsert and the
        ``== True`` comparison.
    """
    # wallet must be unlocked: `key` is only bound after decryption
    try:
        key
    except NameError:
        top5 = Toplevel()
        top5.title("Locked")
        Label(top5, text="Wallet is locked", width=20).grid(row=0, pady=0)
        done = Button(top5, text="Cancel", command=top5.destroy)
        done.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
        return
    app_log.warning("Received tx command")
    try:
        float(amount_input)
    except ValueError:
        top7 = Toplevel()
        top7.title("Invalid amount")
        Label(top7, text="Amount must be a number", width=20).grid(row=0, pady=0)
        done = Button(top7, text="Cancel", command=top7.destroy)
        done.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
        return
    # alias check
    # alias check
    if len(recipient_input) != 56:
        top6 = Toplevel()
        top6.title("Invalid address")
        Label(top6, text="Wrong address length", width=20).grid(row=0, pady=0)
        done = Button(top6, text="Cancel", command=top6.destroy)
        done.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
    else:
        app_log.warning("Amount: {}".format(amount_input))
        app_log.warning("Recipient: {}".format(recipient_input))
        app_log.warning("Keep Forever: {}".format(keep_input))
        app_log.warning("OpenField Data: {}".format(openfield_input))
        timestamp = '%.2f' % time.time()
        # only this tuple is signed; signature/pubkey are appended on submit
        transaction = (str(timestamp), str(myaddress), str(recipient_input), '%.8f' % float(amount_input), str(keep_input), str(openfield_input))  # this is signed
        h = SHA.new(str(transaction).encode("utf-8"))
        signer = PKCS1_v1_5.new(key)
        signature = signer.sign(h)
        signature_enc = base64.b64encode(signature)
        app_log.warning("Client: Encoded Signature: {}".format(signature_enc.decode("utf-8")))
        # self-verify before submitting
        verifier = PKCS1_v1_5.new(key)
        if verifier.verify(h, signature):
            fee = fee_calculate(openfield_input, keep_var.get())
            if float(amount_input) < 0:
                app_log.warning("Client: Signature OK, but cannot use negative amounts")
            elif (float(amount_input) + float(fee) > float(balance)):
                app_log.warning("Mempool: Sending more than owned")
            else:
                app_log.warning("Client: The signature is valid, proceeding to save transaction, signature, new txhash and the public key to mempool")
                # print(str(timestamp), str(address), str(recipient_input), '%.8f' % float(amount_input),str(signature_enc), str(public_key_hashed), str(keep_input), str(openfield_input))
                tx_submit = (str(timestamp), str(myaddress), str(recipient_input), '%.8f' % float(amount_input), str(signature_enc.decode("utf-8")), str(public_key_hashed.decode("utf-8")), str(keep_input), str(openfield_input))
                connections.send(s, "mpinsert", 10)
                connections.send(s, [tx_submit], 10)  # change address here to view other people's transactions
                reply = connections.receive(s, 10)
                app_log.warning("Client: {}".format(reply))
        else:
            app_log.warning("Client: Invalid signature")
def refresh(address, s):
    """Refresh the wallet GUI: pull balance, last block, difficulty and the
    latest transactions for `address` over socket `s`, then update the
    module-level Tk StringVars and the transaction table.

    NOTE(review): the first try/except is deliberate best-effort — on failure
    the previously-set globals (balance, etc.) are reused; but on the very
    first call a failure leaves names like db_timestamp_last unbound and the
    code below will raise. Confirm callers always have a live connection first.
    """
    global balance
    # print "refresh triggered"
    try:
        # account stats: [balance, credit, debit, fees, rewards]
        connections.send(s, "balanceget", 10)
        connections.send(s, address, 10)  # change address here to view other people's transactions
        stats_account = connections.receive(s, 10)
        balance = stats_account[0]
        credit = stats_account[1]
        debit = stats_account[2]
        fees = stats_account[3]
        rewards = stats_account[4]
        app_log.warning("Transaction address balance: {}".format(balance))
        # last block: [0] height, [1] timestamp
        connections.send(s, "blocklast", 10)
        block_get = connections.receive(s, 10)
        bl_height = block_get[0]
        db_timestamp_last = block_get[1]
    except:
        pass
    # calculate fee from the openfield text currently in the GUI
    try:
        if encode_var.get() == 1:
            openfield_input = base64.b64encode(str(openfield.get("1.0", END).strip()))
        else:
            openfield_input = str(openfield.get("1.0", END)).strip()
        fee = '%.8f' % float(0.01 + (float(len(openfield_input)) / 100000) + int(keep_var.get()))  # 0.01 dust
        #app_log.warning("Fee: {}".format(fee))
    except Exception as e:
        # fall back to the base fee if the GUI fields aren't readable
        fee = 0.01
        app_log.warning("Fee error: {}".format(e))
    # calculate fee
    # check difficulty
    connections.send(s, "diffget", 10)
    diff = connections.receive(s, 10)
    # check difficulty
    diff_msg = diff[1]
    # network status: red if the last block is older than 5 minutes
    time_now = str(time.time())
    last_block_ago = float(time_now) - float(db_timestamp_last)
    if last_block_ago > 300:
        sync_msg = "{}m behind".format((int(last_block_ago / 60)))
        sync_msg_label.config(fg='red')
    else:
        sync_msg = "Up to date\nLast block: {}s ago".format((int(last_block_ago)))
        sync_msg_label.config(fg='green')
    # network status
    # fees_current_var.set("Current Fee: {}".format('%.8f' % float(fee)))
    # push refreshed values into the Tk variables bound to the GUI labels
    balance_var.set("Balance: {}".format('%.8f' % float(balance)))
    balance_raw.set('%.8f' % float(balance))
    debit_var.set("Spent Total: {}".format('%.8f' % float(debit)))
    credit_var.set("Received Total: {}".format('%.8f' % float(credit)))
    fees_var.set("Fees Paid: {}".format('%.8f' % float(fees)))
    rewards_var.set("Rewards: {}".format('%.8f' % float(rewards)))
    bl_height_var.set("Block Height: {}".format(bl_height))
    diff_msg_var.set("Mining Difficulty: {}".format('%.2f' % float(diff_msg)))
    sync_msg_var.set("Network: {}".format(sync_msg))
    # fetch the 20 most recent transactions and rebuild the table
    connections.send(s, "addlistlim", 10)
    connections.send(s, address, 10)
    connections.send(s, "20", 10)
    addlist = connections.receive(s, 10)
    addlist_20 = addlist[:20]  # limit
    table(address, addlist_20)
def worker(host, port, node):
    """Outbound peer thread: connect to `host:port`, handshake versions, then
    loop serving the peer's commands (peers / sync / blocknf / blocksfnd /
    nonewblk) until banned, version-disallowed, stopping, or disconnected.

    Side effects: registers the peer in node.peers (connection pool, consensus
    pool, mainnet store) and opens its own DbHandler for ledger queries.
    """
    logger = node.logger
    this_client = f"{host}:{port}"
    if node.IS_STOPPING:
        return
    # let plugins veto/rewrite the peer IP before connecting
    dict_ip = {'ip': host}
    node.plugin_manager.execute_filter_hook('peer_ip', dict_ip)
    client_instance_worker = classes.Client()
    if node.peers.is_banned(host) or dict_ip['ip'] == 'banned':
        client_instance_worker.banned = True
        node.logger.app_log.warning(f"IP {host} is banned, won't connect")
        return
    timeout_operation = 60  # timeout
    timer_operation = time.time()  # start counting
    try:
        s = socks.socksocket()
        if node.tor_conf:
            s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
        # s.setblocking(0)
        s.connect((host, port))
        node.logger.app_log.info(f"Outbound: Connected to {this_client}")
        client_instance_worker.connected = True
        # communication starter: version handshake, then stricter getversion check
        send(s, "version")
        send(s, node.version)
        data = receive(s)
        if data == "ok":
            node.logger.app_log.info(
                f"Outbound: Node protocol version of {this_client} matches our client"
            )
        else:
            raise ValueError(
                f"Outbound: Node protocol version of {this_client} mismatch")
        # If we are post pow fork, then the peer has getversion command
        # if node.last_block >= POW_FORK - FORK_AHEAD:
        # Peers that are not up to date will disconnect since they don't know that command.
        # That is precisely what we need :D
        send(s, "getversion")
        peer_version = receive(s)
        if peer_version not in node.version_allow:
            raise ValueError(
                f"Outbound: Incompatible peer version {peer_version} from {this_client}"
            )
        send(s, "hello")
        # communication starter
    except Exception as e:
        node.logger.app_log.info(f"Could not connect to {this_client}: {e}")
        return  # can return here, because no lists are affected yet
    # if node.last_block >= POW_FORK - FORK_AHEAD:
    node.peers.store_mainnet(host, peer_version)
    try:
        peer_ip = s.getpeername()[0]
    except:
        # Should not happen, extra safety
        node.logger.app_log.warning(
            "Outbound: Transport endpoint was not connected")
        return
    if this_client not in node.peers.connection_pool:
        node.peers.append_client(this_client)
        node.logger.app_log.info(f"Connected to {this_client}")
        node.logger.app_log.info(
            f"Current active pool: {node.peers.connection_pool}")
    # each worker gets its own DB handler (sqlite objects are per-thread)
    if not client_instance_worker.banned and node.peers.version_allowed(
            host, node.version_allow) and not node.IS_STOPPING:
        db_handler_instance = dbhandler.DbHandler(
            node.index_db, node.ledger_path_conf, node.hyper_path_conf,
            node.full_ledger, node.ram_conf, node.ledger_ram_file, logger)
    while not client_instance_worker.banned and node.peers.version_allowed(
            host, node.version_allow) and not node.IS_STOPPING:
        try:
            #ensure_good_peer_version(host)
            data = receive(s)  # receive data, one and the only root point
            # print(data)
            if data == "peers":
                # peer list exchange
                subdata = receive(s)
                node.peers.peersync(subdata)
            elif data == "sync":
                if not time.time() <= timer_operation + timeout_operation:
                    timer_operation = time.time()  # reset timer
                try:
                    # at most 3 concurrent sync sessions across all workers
                    while len(node.syncing) >= 3:
                        if node.IS_STOPPING:
                            return
                        time.sleep(int(node.pause_conf))
                    node.syncing.append(peer_ip)
                    # sync start
                    # send block height, receive block height
                    send(s, "blockheight")
                    db_handler_instance.execute(
                        db_handler_instance.c,
                        'SELECT max(block_height) FROM transactions')
                    db_block_height = db_handler_instance.c.fetchone()[0]
                    node.logger.app_log.info(
                        f"Outbound: Sending block height to compare: {db_block_height}"
                    )
                    # append zeroes to get static length
                    send(s, db_block_height)
                    received_block_height = receive(
                        s)  # receive node's block height
                    node.logger.app_log.info(
                        f"Outbound: Node {peer_ip} is at block height: {received_block_height}"
                    )
                    if int(received_block_height) < db_block_height:
                        # peer is behind us: feed them blocks after their last hash
                        node.logger.app_log.warning(
                            f"Outbound: We have a higher block ({db_block_height}) than {peer_ip} ({received_block_height}), sending"
                        )
                        data = receive(s)  # receive client's last block_hash
                        # send all our followup hashes
                        node.logger.app_log.info(
                            f"Outbound: Will seek the following block: {data}")
                        # consensus pool 2 (active connection)
                        consensus_blockheight = int(received_block_height)
                        node.peers.consensus_add(peer_ip, consensus_blockheight,
                                                 s, node.last_block)
                        # consensus pool 2 (active connection)
                        try:
                            db_handler_instance.execute_param(
                                db_handler_instance.h3,
                                "SELECT block_height FROM transactions WHERE block_hash = ?;",
                                (data, ))
                            client_block = db_handler_instance.h3.fetchone()[0]
                        except Exception:
                            # their hash isn't in our chain: tell them it's not found
                            node.logger.app_log.warning(
                                f"Outbound: Block {data[:8]} of {peer_ip} not found"
                            )
                            send(s, "blocknf")
                            send(s, data)
                        else:
                            node.logger.app_log.info(
                                f"Outbound: Node is at block {client_block}"
                            )  # now check if we have any newer
                            db_handler_instance.execute(
                                db_handler_instance.h3,
                                'SELECT block_hash FROM transactions ORDER BY block_height DESC LIMIT 1'
                            )
                            db_block_hash = db_handler_instance.h3.fetchone()[
                                0]  # get latest block_hash
                            if db_block_hash == data or not node.egress:
                                if not node.egress:
                                    node.logger.app_log.warning(
                                        f"Outbound: Egress disabled for {peer_ip}"
                                    )
                                    time.sleep(int(
                                        node.pause_conf))  # reduce CPU usage
                                else:
                                    node.logger.app_log.info(
                                        f"Outbound: Node {peer_ip} has the latest block"
                                    )
                                # TODO: this is unlikely to happen due to conditions above, consider removing
                                send(s, "nonewblk")
                            else:
                                # batch follow-up blocks, capped by serialized size
                                blocks_fetched = []
                                while sys.getsizeof(
                                        str(blocks_fetched)
                                ) < 500000:  # limited size based on txs in blocks
                                    # db_handler.execute_param(db_handler.h3, ("SELECT block_height, timestamp,address,recipient,amount,signature,public_key,keep,openfield FROM transactions WHERE block_height > ? AND block_height <= ?;"),(str(int(client_block)),) + (str(int(client_block + 1)),))
                                    db_handler_instance.execute_param(
                                        db_handler_instance.h3,
                                        ("SELECT timestamp,address,recipient,amount,signature,public_key,operation,openfield FROM transactions WHERE block_height > ? AND block_height <= ?;"
                                         ), (
                                             str(int(client_block)),
                                             str(int(client_block + 1)),
                                         ))
                                    result = db_handler_instance.h3.fetchall()
                                    if not result:
                                        break
                                    blocks_fetched.extend([result])
                                    client_block = int(client_block) + 1
                                # blocks_send = [[l[1:] for l in group] for _, group in groupby(blocks_fetched, key=itemgetter(0))] # remove block number
                                node.logger.app_log.info(
                                    f"Outbound: Selected {blocks_fetched}")
                                send(s, "blocksfnd")
                                confirmation = receive(s)
                                if confirmation == "blockscf":
                                    node.logger.app_log.info(
                                        "Outbound: Client confirmed they want to sync from us"
                                    )
                                    send(s, blocks_fetched)
                                elif confirmation == "blocksrj":
                                    node.logger.app_log.info(
                                        "Outbound: Client rejected to sync from us because we're dont have the latest block"
                                    )
                    elif int(received_block_height) >= db_block_height:
                        # peer is even or ahead: send our latest hash for verification
                        if int(received_block_height) == db_block_height:
                            node.logger.app_log.info(
                                f"Outbound: We have the same block as {peer_ip} ({received_block_height}), hash will be verified"
                            )
                        else:
                            node.logger.app_log.warning(
                                f"Outbound: We have a lower block ({db_block_height}) than {peer_ip} ({received_block_height}), hash will be verified"
                            )
                        db_handler_instance.execute(
                            db_handler_instance.c,
                            'SELECT block_hash FROM transactions ORDER BY block_height DESC LIMIT 1'
                        )
                        db_block_hash = db_handler_instance.c.fetchone()[
                            0]  # get latest block_hash
                        node.logger.app_log.info(
                            f"Outbound: block_hash to send: {db_block_hash}")
                        send(s, db_block_hash)
                        #ensure_good_peer_version(host)
                        # consensus pool 2 (active connection)
                        consensus_blockheight = int(
                            received_block_height
                        )  # str int to remove leading zeros
                        node.peers.consensus_add(peer_ip, consensus_blockheight,
                                                 s, node.last_block)
                        # consensus pool 2 (active connection)
                except Exception as e:
                    node.logger.app_log.info(f"Outbound: Sync failed {e}")
                finally:
                    # always release our sync slot
                    node.syncing.remove(peer_ip)
            elif data == "blocknf":
                # one of the possible outcomes
                block_hash_delete = receive(s)
                # print peer_ip
                # if max(consensus_blockheight_list) == int(received_block_height):
                if int(received_block_height) == node.peers.consensus_max:
                    # only roll back when this peer claims the consensus height
                    blocknf(node, block_hash_delete, peer_ip,
                            db_handler_instance)
                    if node.peers.warning(s, peer_ip, "Rollback", 2):
                        raise ValueError(f"{peer_ip} is banned")
                sendsync(s, peer_ip, "Block not found", False, node)
            elif data == "blocksfnd":
                node.logger.app_log.info(
                    f"Outbound: Node {peer_ip} has the block(s)"
                )  # node should start sending txs in this step
                # node.logger.app_log.info("Inbound: Combined segments: " + segments)
                # print peer_ip
                if node.db_lock.locked():
                    node.logger.app_log.warning(
                        f"Skipping sync from {peer_ip}, syncing already in progress"
                    )
                else:
                    db_handler_instance.execute(
                        db_handler_instance.c,
                        "SELECT timestamp FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;"
                    )  # or it takes the first
                    node.last_block_timestamp = quantize_two(
                        db_handler_instance.c.fetchone()[0])
                    # stale chain (>10 min): follow the most common block instead of the longest chain
                    if int(node.last_block_timestamp) < (time.time() - 600):
                        block_req = node.peers.consensus_most_common
                        node.logger.app_log.warning(
                            "Most common block rule triggered")
                    else:
                        block_req = node.peers.consensus_max
                        node.logger.app_log.warning(
                            "Longest chain rule triggered")
                    #ensure_good_peer_version(host)
                    if int(received_block_height) >= block_req:
                        try:  # they claim to have the longest chain, things must go smooth or ban
                            node.logger.app_log.warning(
                                f"Confirming to sync from {peer_ip}")
                            send(s, "blockscf")
                            segments = receive(s)
                            #ensure_good_peer_version(host)
                        except:
                            if node.peers.warning(
                                    s, peer_ip,
                                    "Failed to deliver the longest chain", 2):
                                raise ValueError(f"{peer_ip} is banned")
                        else:
                            digest_block(node, segments, s, peer_ip,
                                         db_handler_instance)  # receive theirs
                    else:
                        send(s, "blocksrj")
                        node.logger.app_log.warning(
                            f"Inbound: Distant peer {peer_ip} is at {received_block_height}, should be at least {block_req}"
                        )
                sendsync(s, peer_ip, "Block found", True, node)
                # block_hash validation end
            elif data == "nonewblk":
                # send and receive mempool
                if mp.MEMPOOL.sendable(peer_ip):
                    mempool_txs = mp.MEMPOOL.tx_to_send(peer_ip)
                    # node.logger.app_log.info("Outbound: Extracted from the mempool: " + str(mempool_txs))  # improve: sync based on signatures only
                    # if len(mempool_txs) > 0: #wont sync mempool until we send something, which is bad
                    # send own
                    send(s, "mempool")
                    send(s, mempool_txs)
                    # send own
                    # receive theirs
                    segments = receive(s)
                    node.logger.app_log.info(
                        mp.MEMPOOL.merge(segments, peer_ip,
                                         db_handler_instance.c, True))
                    # receive theirs
                    # Tell the mempool we just send our pool to a peer
                    mp.MEMPOOL.sent(peer_ip)
                sendsync(s, peer_ip, "No new block", True, node)
            elif data == "hyperlane":
                pass
            else:
                if data == '*':
                    raise ValueError("Broken pipe")
                raise ValueError(
                    f"Unexpected error, received: {str(data)[:32]}")
        except Exception as e:
            """
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)
            """
            # any failure tears down this peer entirely
            # remove from active pool
            node.peers.remove_client(this_client)
            node.logger.app_log.warning(
                f"Outbound: Disconnected from {this_client}: {e}")
            # remove from active pool
            # remove from consensus 2
            node.peers.consensus_remove(peer_ip)
            # remove from consensus 2
            node.logger.app_log.info(
                f"Connection to {this_client} terminated due to {e}")
            node.logger.app_log.info(
                f"---thread {threading.currentThread()} ended---")
            # properly end the connection
            s.close()
            # properly end the connection
            if node.debug_conf:
                raise  # major debug client
            else:
                node.logger.app_log.info(f"Ending thread, because {e}")
                return
    if not node.peers.version_allowed(host, node.version_allow):
        node.logger.app_log.warning(
            f"Outbound: Ending thread, because {host} has too old a version: {node.peers.ip_to_mainnet[host]}"
        )
def peers_test(self, file, peerdict: dict, strict=True):
    """Validates then adds a peer to the peer list on disk.

    Loads the JSON peer file `file`, probes every candidate in `peerdict`
    ({ip: port}) that is not already on disk, and rewrites the file (via a
    .tmp + move) only if at least one new peer was validated.

    :param file: path to the on-disk JSON peer list ({ip: port} mapping)
    :param peerdict: candidate peers to test, {ip: port}
    :param strict: if True, also require a "getversion" handshake with a
        protocol version in self.config.version_allow; if False a plain
        TCP connect is enough.
    """
    # called by Sync, should not be an issue, but check if needs to be thread safe or not.
    # also called by self.client_loop, which is to be reworked
    # Egg: Needs to be thread safe.
    self.peerlist_updated = False
    try:
        with open(file, "r") as peer_file:
            peers_pairs = json.load(peer_file)
        # TODO: rework, because this takes too much time and freezes the status thread.
        # to be done in a dedicated thread, with one peer per xx seconds, not all at once, and added properties.
        # Iterate over a *copy* of peerdict: logs showed the dict can change while iterating.
        for ip, port in dict(peerdict).items():
            # I do create a new dict copy above, because logs showed that the dict can change while iterating
            if self.node.IS_STOPPING:
                # Early exit if stopping
                return
            try:
                if ip not in peers_pairs:
                    self.app_log.info(
                        f"Testing connectivity to: {ip}:{port}")
                    s = socks.socksocket()
                    try:
                        # connect timeout
                        s.settimeout(5)
                        if self.config.tor:
                            # route the probe through the local Tor SOCKS5 proxy
                            s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                        if strict:
                            # strict mode: connect AND verify protocol version
                            s.connect((ip, int(port)))
                            connections.send(s, "getversion")
                            versiongot = connections.receive(s, timeout=1)
                            if versiongot == "*":
                                # "*" is the peer's busy marker
                                raise ValueError("peer busy")
                            if versiongot not in self.config.version_allow:
                                raise ValueError(
                                    f"cannot save {ip}, incompatible protocol version {versiongot} "
                                    f"not in {self.config.version_allow}")
                            self.app_log.info(
                                f"Inbound: Distant peer {ip}:{port} responding: {versiongot}"
                            )
                        else:
                            # lax mode: a successful TCP connect is enough
                            s.connect((ip, int(port)))
                    finally:
                        # properly end the connection in all cases
                        try:
                            s.close()
                        except:
                            pass
                    # reached only if no exception above: record the validated peer
                    peers_pairs[ip] = port
                    self.app_log.info(
                        f"Inbound: Peer {ip}:{port} saved to peers")
                    self.peerlist_updated = True
                else:
                    self.app_log.info("Distant peer already in peers")
            except Exception as e:
                # exception for a single peer: skip it, keep testing the rest
                self.app_log.info(
                    f"Inbound: Distant peer not connectible ({e})")
        if self.peerlist_updated:
            self.app_log.warning(
                f"{file} peerlist updated ({len(peers_pairs)}) total"
            )
            # the whole dict is saved; write to .tmp then move for atomicity
            with open(f"{file}.tmp", "w") as peer_file:
                json.dump(peers_pairs, peer_file)
            shutil.move(f"{file}.tmp", file)
        else:
            self.app_log.warning(
                f"{file} peerlist update skipped, no changes")
    except Exception as e:
        # Exception for the file itself.
        self.app_log.info(f"Error reading {file}: '{e}'")
def table(address, addlist_20):  # transaction table
    """Rebuild the GUI transaction table for `address`.

    Builds a flat `datasheet` list of 5 fields per row (Time, From, To,
    Amount, Type): first unconfirmed mempool transactions fetched from the
    node, then the confirmed transactions in `addlist_20` (expected: up to
    20 DB rows in the standard transactions-table column order). Renders
    everything into the `f4` Tk frame as read-only Entry widgets.

    Uses module globals: `s` (node socket), `resolve_var`, `f4`, `app_log`.
    """

    def _unalias(value):
        # Remove the literal "alias=" prefix.
        # BUGFIX: the original used value.lstrip("alias="), but str.lstrip
        # strips any leading character from the *set* {a, l, i, s, =}, so an
        # alias such as "alias=sail" was reduced to "" instead of "sail".
        return value[len("alias="):] if value.startswith("alias=") else value

    # data
    datasheet = ["Time", "From", "To", "Amount", "Type"]
    # show mempool txs
    connections.send(s, "mpget", 10)  # senders
    mempool_total = connections.receive(s, 10)
    print(mempool_total)
    colors = []
    for tx in mempool_total:
        if tx[1] == address:
            # unconfirmed tx row: no timestamp yet, mark it explicitly
            datasheet.append("Unconfirmed")
            datasheet.append(tx[1])
            datasheet.append(tx[2])
            datasheet.append(tx[3])
            datasheet.append("Transaction")
            colors.append("bisque")
    # show mempool txs
    # retrieve aliases in bulk
    addlist_addressess = []
    reclist_addressess = []
    for x in addlist_20:
        addlist_addressess.append(x[2])  # append address
        reclist_addressess.append(x[3])  # append recipient
    # print(addlist_addressess)
    # define row color: green for incoming, red for outgoing
    for x in addlist_20:
        if x[3] == address:
            colors.append("green4")
        else:
            colors.append("indianred")
    # define row color
    if resolve_var.get() == 1:
        connections.send(s, "aliasesget", 10)  # senders
        connections.send(s, addlist_addressess, 10)
        aliases_address_results = connections.receive(s, 10)
        connections.send(s, "aliasesget", 10)  # recipients
        connections.send(s, reclist_addressess, 10)
        aliases_rec_results = connections.receive(s, 10)
    # retrieve aliases in bulk
    i = 0
    for row in addlist_20:
        db_timestamp = row[1]
        datasheet.append(datetime.fromtimestamp(float(db_timestamp)).strftime('%Y-%m-%d %H:%M:%S'))
        if resolve_var.get() == 1:
            db_address = _unalias(aliases_address_results[i])
        else:
            db_address = row[2]
        datasheet.append(db_address)
        if resolve_var.get() == 1:
            db_recipient = _unalias(aliases_rec_results[i])
        else:
            db_recipient = row[3]
        datasheet.append(db_recipient)
        db_amount = row[4]
        db_reward = row[9]
        db_openfield = row[11]
        # displayed amount includes the mining reward for block-reward rows
        datasheet.append('%.8f' % (float(db_amount) + float(db_reward)))
        if float(db_reward) > 0:
            symbol = "Mined"
        elif db_openfield.startswith("bmsg"):
            symbol = "b64 Message"
        elif db_openfield.startswith("msg"):
            symbol = "Message"
        else:
            symbol = "Transaction"
        datasheet.append(symbol)
        i = i + 1
    # data
    app_log.warning(datasheet)
    app_log.warning(len(datasheet))
    # 5 entries = header only -> nothing to show
    if len(datasheet) == 5:
        app_log.warning("Looks like a new address")
    elif len(datasheet) < 20 * 5:
        app_log.warning(len(datasheet))
        table_limit = len(datasheet) / 5
    else:
        table_limit = 20
    if len(datasheet) > 5:
        k = 0
        for child in f4.winfo_children():  # prevent hangup
            child.destroy()
        for i in range(int(table_limit)):
            for j in range(5):
                # window of the 5 most recently consumed cells, used to detect
                # header / unconfirmed rows regardless of the column position
                datasheet_compare = [datasheet[k], datasheet[k - 1], datasheet[k - 2],
                                     datasheet[k - 3], datasheet[k - 4]]
                if "Time" in datasheet_compare:  # header
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='linen')
                elif j == 0:  # first row
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='linen')
                elif "Unconfirmed" in datasheet_compare:  # unconfirmed txs
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='linen')
                elif j == 3:  # sent
                    e = Entry(f4, width=0)
                    # NOTE(review): colors[i - 1] wraps to the last color for the
                    # header row (i == 0) — presumably intentional; verify in UI.
                    e.configure(readonlybackground=colors[i - 1])
                elif j == 4:  # last row
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='bisque')
                else:
                    e = Entry(f4, width=0)
                    e.configure(readonlybackground='bisque')
                e.grid(row=i + 1, column=j, sticky=EW)
                e.insert(END, datasheet[k])
                e.configure(state="readonly")
                k = k + 1
def api_gettransactionbysignature(self, socket_handler, db_handler, peers):
    """
    Returns the full transaction matching a signature. Takes signature and format as params (json output if format is True)
    :param socket_handler: connected client socket; the signature and a format
        flag are read from it, and the result is written back to it.
    :param db_handler: DB wrapper exposing execute/execute_param and the `h`
        cursor over the transactions table.
    :param peers: unused here; kept for API handler signature uniformity.
    :return: None (the response is sent over socket_handler)
    """
    transaction = {}
    try:
        # get the txid
        signature = connections.receive(socket_handler)
        # and format
        # NOTE(review): `format` shadows the builtin; kept for interface stability.
        format = connections.receive(socket_handler)
        # raw tx details
        if self.config.old_sqlite:
            # older sqlite: plain equality lookup
            db_handler.execute_param(
                db_handler.h,
                "SELECT * FROM transactions WHERE signature = ?1",
                (signature, ))
        else:
            # newer sqlite: prefix match on the first 4 chars first,
            # presumably to leverage an index before the full compare — TODO confirm
            db_handler.execute_param(
                db_handler.h,
                "SELECT * FROM transactions WHERE substr(signature,1,4)=substr(?1,1,4) and signature = ?1",
                (signature, ))
        raw = db_handler.h.fetchone()
        if not format:
            # raw mode: ship the DB row as-is and stop
            connections.send(socket_handler, raw)
            print('api_gettransactionbysignature', format, raw)
            return
        # current block height, needed for confirmations
        db_handler.execute(db_handler.h,
                           "SELECT MAX(block_height) FROM transactions")
        block_height = db_handler.h.fetchone()[0]
        # map DB row columns to the documented dict keys
        transaction['signature'] = signature
        transaction['time'] = raw[1]
        transaction['hash'] = raw[5]
        transaction['address'] = raw[2]
        transaction['recipient'] = raw[3]
        transaction['amount'] = raw[4]
        transaction['fee'] = raw[8]
        transaction['reward'] = raw[9]
        transaction['operation'] = raw[10]
        transaction['openfield'] = raw[11]
        try:
            transaction['pubkey'] = base64.b64decode(
                raw[6]).decode('utf-8')
        except:
            transaction['pubkey'] = raw[6]  # support new pubkey schemes
        transaction['blockhash'] = raw[7]
        transaction['blockheight'] = raw[0]
        transaction['confirmations'] = block_height - raw[0]
        # Get more info on the block the tx is in: the reward > 0 row of a
        # block is its coinbase, which carries the block time and miner.
        db_handler.execute_param(
            db_handler.h,
            "SELECT timestamp, recipient FROM transactions WHERE block_height= ? AND reward > 0",
            (raw[0], ))
        block_data = db_handler.h.fetchone()
        transaction['blocktime'] = block_data[0]
        transaction['blockminer'] = block_data[1]
        print('api_gettransactionbysignature', format, transaction)
        connections.send(socket_handler, transaction)
    except Exception as e:
        # self.app_log.warning(e)
        raise
def msg_sent_get(addlist):
    """Render this wallet's *sent* messages into the `msg_sent` Tk widget.

    Scans `addlist` (DB transaction rows) for rows whose openfield carries a
    message prefix ("msg=", "bmsg=", "enc=msg=", "enc=bmsg=") and whose sender
    (column 2) is the module-global `address`. For each match, resolves the
    recipient's alias via the node socket `s`, decodes/decrypts the payload as
    indicated by its prefix, and inserts a "<time> To <recipient>: <text>" line.
    """

    def _strip_prefix(text, prefix):
        # Remove an exact prefix.
        # BUGFIX: the original used text.lstrip(prefix), but str.lstrip strips
        # a *character set*, not a prefix, so it could also eat leading payload
        # characters that happen to be in the prefix's character set.
        return text[len(prefix):] if text.startswith(prefix) else text

    def _decrypt(payload):
        # payload is the str() repr of a (nonce, tag, ciphertext, enc_session_key)
        # tuple produced at send time (hybrid RSA-OAEP + AES-EAX scheme).
        (cipher_aes_nonce, tag, ciphertext, enc_session_key) = ast.literal_eval(payload)
        private_key = RSA.import_key(open("privkey.der").read())
        # Decrypt the session key with our private RSA key
        cipher_rsa = PKCS1_OAEP.new(private_key)
        session_key = cipher_rsa.decrypt(enc_session_key)
        # Decrypt the data with the AES session key
        cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce)
        return cipher_aes.decrypt_and_verify(ciphertext, tag).decode("utf-8")

    for x in addlist:
        if x[11].startswith(("msg=", "bmsg=", "enc=msg=", "enc=bmsg=")) and x[2] == address:
            # resolve the recipient's alias from the node
            connections.send(s, "aliasget", 10)
            connections.send(s, x[3], 10)
            received_aliases = connections.receive(s, 10)
            msg_recipient = received_aliases[0][0]
            if x[11].startswith("enc=msg="):
                # encrypted plain message
                msg_sent_digest = _strip_prefix(x[11], "enc=msg=")
                try:
                    msg_sent_digest = _decrypt(msg_sent_digest)
                except Exception:
                    msg_sent_digest = "Could not decrypt message"
            elif x[11].startswith("enc=bmsg="):
                # encrypted, base64-wrapped message: unwrap first, then decrypt
                msg_sent_digest = _strip_prefix(x[11], "enc=bmsg=")
                try:
                    msg_sent_digest = base64.b64decode(msg_sent_digest).decode("utf-8")
                    msg_sent_digest = _decrypt(msg_sent_digest)
                except Exception:
                    msg_sent_digest = "Could not decrypt message"
            elif x[11].startswith("bmsg="):
                # base64-encoded cleartext message
                msg_sent_digest = _strip_prefix(x[11], "bmsg=")
                try:
                    msg_sent_digest = base64.b64decode(msg_sent_digest).decode("utf-8")
                except Exception:
                    # BUGFIX: the original assigned `msg_received_digest` here by
                    # mistake, so the garbled payload was displayed instead of
                    # the error text.
                    msg_sent_digest = "Could not decode message"
            elif x[11].startswith("msg="):
                # cleartext message
                msg_sent_digest = _strip_prefix(x[11], "msg=")
            msg_sent.insert(
                INSERT,
                ((time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(float(x[1]))))
                 + " To " + _strip_prefix(msg_recipient, "alias=")
                 + ": " + msg_sent_digest) + "\n")
def get(self):
    """Serve an HTML page plotting this node's peers on a Google map.

    Queries the local node (127.0.0.1:5658) with "statusget", geolocates every
    peer IP via freegeoip.net, and writes a self-contained HTML page with a
    clustered Google Maps marker per peer, followed by (currently disabled)
    node statistics.

    NOTE(review): freegeoip.net has been discontinued; each lookup will fail
    at runtime unless the endpoint is replaced — confirm and migrate.
    """
    # ask the local node for its status; index 2 of the reply is the peer list
    s = socks.socksocket()
    s.settimeout(10)
    s.connect(("127.0.0.1", 5658))
    connections.send(s, "statusget", 10)
    response = connections.receive(s, 10)
    s.close()
    nodes_list = response[2]
    ips = nodes_list
    markers = []
    print("IPs:", ips)
    # NOTE(review): `f` is opened but never written to — presumably leftover
    # from an earlier version that cached geo lookups; verify before removing.
    with open('geo.json', 'w') as f:
        for ip in ips:
            getgeo = requests.request("GET", "http://freegeoip.net/json/{}".format(ip))
            response_web = json.loads(getgeo.text)
            try:
                # debug output of the geo lookup result.
                # BUGFIX: the original was `print(response_web).encode("utf-8")`,
                # which calls .encode on print's None return value and was
                # silently swallowed by the except below.
                print(str(response_web).encode("utf-8"))
            except:
                pass
            # one JS object literal per peer: {lat: ..., lng: ...},
            markers.append("{{lat: {},".format(response_web["latitude"]))
            markers.append(" lng: {}}},\n".format(response_web["longitude"]))
    # assemble the page as a list of fragments, joined once at the end
    html = []
    html.append("<!DOCTYPE html>\n")
    html.append("<html>\n")
    html.append("<head>\n")
    html.append("<meta name='viewport' content='initial-scale=1.0, user-scalable=no'>\n")
    html.append("<meta charset='utf-8'>\n")
    html.append("<title>Bismuth Node Statistics</title>\n")
    html.append('<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" >')
    html.append("<style>\n")
    html.append("/* Always set the map height explicitly to define the size of the div\n")
    html.append("* element that contains the map. */\n")
    html.append("#map {\n")
    html.append("height: 100%;\n")
    html.append("}\n")
    html.append("/* Optional: Makes the sample page fill the window. */\n")
    html.append("html, body {\n")
    html.append("height: 100%;\n")
    html.append("margin: 0;\n")
    html.append("padding: 0;\n")
    html.append("}\n")
    html.append("</style>\n")
    html.append("</head>\n")
    html.append("<body>\n")
    html.append("<div id='map'></div>\n")
    html.append("<script>\n")
    html.append("\n")
    html.append("function initMap() {\n")
    html.append("var map = new google.maps.Map(document.getElementById('map'), {\n")
    html.append("zoom: 3,\n")
    html.append("center: {lat: -28.024, lng: 140.887}\n")
    html.append("});\n")
    html.append("// Create an array of alphabetical characters used to label the markers.\n")
    html.append("var labels = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';\n")
    html.append("// Add some markers to the map.\n")
    html.append("// Note: The code uses the JavaScript Array.prototype.map() method to\n")
    html.append("// create an array of markers based on a given 'locations' array.\n")
    html.append("// The map() method here has nothing to do with the Google Maps API.\n")
    html.append("var markers = locations.map(function(location, i) {\n")
    html.append("return new google.maps.Marker({\n")
    html.append("position: location,\n")
    html.append("label: labels[i % labels.length]\n")
    html.append("});\n")
    html.append("});\n")
    html.append("// Add a marker clusterer to manage the markers.\n")
    html.append("var markerCluster = new MarkerClusterer(map, markers,\n")
    html.append("{imagePath: 'https://developers.google.com/maps/documentation/javascript/examples/markerclusterer/m'});\n")
    html.append("}\n")
    html.append("var locations = [\n")
    html.append(''.join(markers))
    html.append("]\n")
    html.append("</script>\n")
    html.append("<script src='https://developers.google.com/maps/documentation/javascript/examples/markerclusterer/markerclusterer.js'>\n")
    html.append("</script>\n")
    html.append("<script async defer\n")
    html.append("src='https://maps.googleapis.com/maps/api/js?key={}&callback=initMap'>".format(api_key))
    html.append("</script>\n")
    # Disabled node-statistics section, kept verbatim for later reactivation.
    """
    node_address = response[0]
    nodes_count = response[1]
    nodes_list = response[2]
    threads_count = response[3]
    uptime = response[4]
    consensus = response[5]
    consensus_percentage = response[6]
    version = response[7]
    html.append("<div class = 'col-md-8'>")
    html.append("Node address: {}<br>".format(node_address))
    html.append("Number of nodes: {}<br>".format(nodes_count))
    html.append("List of nodes: {}<br>".format(nodes_list))
    html.append("Number of threads: {}<br>".format(threads_count))
    html.append("Uptime: {}<br>".format(uptime))
    html.append("Consensus: {}<br>".format(consensus))
    html.append("Consensus percentage: {}<br>".format(consensus_percentage))
    html.append("Version: {}<br>".format(version))
    html.append("</div>")
    """
    html.append("</body>\n")
    html.append("</html>\n")
    self.write(''.join(html))
def blocklastjson(socket):
    """Ask the node for the latest block (JSON form) and print each field.

    NOTE: talks over the module-level socket `s`; the `socket` parameter is
    unused and kept only for signature compatibility with the other commands.
    """
    connections.send(s, "blocklastjson", 10)
    last_block = connections.receive(s, 10)
    # the reply is a mapping of block fields to values
    for field in last_block:
        print(field, ":", last_block[field])
def handle(self):
    """Handle one mining-pool client request.

    Supported commands read from the miner's socket:
      - 'diffp': reply with the pool's share-qualification difficulty percent.
      - 'block': receive a mined block from a miner, verify it against the
        real network difficulty (submit to all known peers if it qualifies),
        then check it against the pool's share floor and credit a share in
        the local shares.db if it qualifies and the nonce is unused.
    """
    peer_ip = self.request.getpeername()[0]
    data = connections.receive(self.request, 10)
    app_log.warning("Received: {} from {}".format(
        data, peer_ip))  # will add custom ports later
    if data == 'diffp':
        app_log.warning(
            "Sending the share qualification difficulty requirement: {}%".
            format(diff_percent_number))
        connections.send(self.request, diff_percent_number, 10)
    if data == "block":  # from miner to node
        # sock: open a connection to the local node (via Tor if configured)
        s1 = socks.socksocket()
        if tor_conf == 1:
            s1.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
        s1.connect((node_ip_conf, int(port)))  # connect to local node,
        # sock
        # receive block
        miner_address = connections.receive(self.request, 10)
        app_log.warning("Received a block from miner {} ({})".format(
            peer_ip, miner_address))
        block_send = connections.receive(self.request, 10)
        # nonce of the last transaction of the submitted block
        nonce = (block_send[-1][7])
        app_log.warning("Combined mined segments: {}".format(block_send))
        #print(nonce)
        #print(block_send)
        #print(miner_address)
        # check difficulty: ask the node for the current network difficulty
        app_log.warning("Asking node for difficulty")
        diff = int(diffget(s1))
        app_log.warning("Calculated difficulty: {}".format(diff))
        # check difficulty
        app_log.warning("Asking node for last block")
        # get last block (index 7 of the reply row is its hash)
        connections.send(s1, "blocklast", 10)
        blocklast = connections.receive(s1, 10)
        db_block_hash = blocklast[7]
        # get last block
        app_log.warning("Last Hash: {}".format(db_block_hash))
        # PoW check: sha224(address + nonce + last_hash) as a bit string must
        # contain the first `diff` bits of the last block hash
        mining_hash = bin_convert(
            hashlib.sha224((address + nonce + db_block_hash).encode("utf-8")).hexdigest())
        mining_condition = bin_convert(db_block_hash)[0:diff]
        if mining_condition in mining_hash:
            app_log.warning("Difficulty requirement satisfied for mining")
            app_log.warning("Sending block to node {}".format(peer_ip))
            # NOTE(review): peer_dict is a module-level global rebuilt here on
            # every qualifying block — confirm no concurrent handler relies on it.
            global peer_dict
            peer_dict = {}
            with open("peers.txt") as f:
                for line in f:
                    # strip parens, colons, quotes and whitespace from the
                    # "(ip, port)" lines of peers.txt
                    line = re.sub("[\)\(\:\\n\'\s]", "", line)
                    peer_dict[line.split(",")[0]] = line.split(",")[1]
                for k, v in peer_dict.items():
                    # NOTE(review): reuses/overwrites `peer_ip` (originally the
                    # miner's IP) as the current target node IP from here on.
                    peer_ip = k
                    # app_log.info(HOST)
                    peer_port = int(v)
                    # app_log.info(PORT)
                    # connect to all nodes, best effort with a short timeout
                    try:
                        s = socks.socksocket()
                        s.settimeout(0.3)
                        if tor_conf == 1:
                            s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                        s.connect((
                            peer_ip,
                            int(peer_port)))  # connect to node in peerlist
                        app_log.warning("Connected")
                        app_log.warning(
                            "Pool: Proceeding to submit mined block")
                        connections.send(s, "block", 10)
                        connections.send(s, block_send, 10)
                        app_log.warning(
                            "Pool: Block submitted to {}".format(peer_ip))
                    except Exception as e:
                        # a single unreachable peer must not stop the broadcast
                        app_log.warning(
                            "Pool: Could not submit block to {} because {}"
                            .format(peer_ip, e))
                        pass
        # share accounting: the pool's share floor is a percentage of the
        # real difficulty, capped at the real difficulty itself
        diff_percentage = percentage(diff_percent_number, diff)
        app_log.warning(
            "Pool: Current difficulty: Pool: {} Real: {}".format(
                diff_percentage, diff))
        if diff < diff_percentage:
            diff_shares = diff
        else:
            diff_shares = diff_percentage
        shares = sqlite3.connect('shares.db')
        shares.text_factory = str
        # NOTE(review): `s` is rebound from a socket (above) to a sqlite
        # cursor here — confusing shadowing, kept as-is.
        s = shares.cursor()
        # protect against used share resubmission
        execute_param(s, ("SELECT nonce FROM nonces WHERE nonce = ?"),
                      (nonce, ))
        try:
            # fetchone() returns None for an unused nonce, making [0] raise:
            # reaching this line means the nonce was already used
            result = s.fetchone()[0]
            app_log.warning("Miner trying to reuse a share, ignored")
        except:
            # protect against used share resubmission
            mining_condition = bin_convert(db_block_hash)[
                0:diff_shares]  # floor set by pool
            if mining_condition in mining_hash:
                app_log.warning(
                    "Difficulty requirement satisfied for saving shares")
                execute_param(s, ("INSERT INTO nonces VALUES (?)"),
                              (nonce, ))
                commit(shares)
                timestamp = '%.2f' % time.time()
                s.execute("INSERT INTO shares VALUES (?,?,?,?)",
                          (str(miner_address), str(1), timestamp, "0"))
                shares.commit()
            else:
                app_log.warning(
                    "Difficulty requirement not satisfied for anything")
        s.close()
        s1.close()
# Standalone example script: connect to a Bismuth node and print the balance
# breakdown of the address derived from the local key pair.
import essentials, options, connections, socks

# node address/port come from the standard config file
config = options.Get()
config.read()
node_ip = config.node_ip
port = config.port
# load the local wallet keys; only `address` is used below
key, public_key_readable, private_key_readable, encrypted, unlocked, public_key_b64encoded, address = essentials.keys_load(
    "privkey.der", "pubkey.der")
s = socks.socksocket()
s.settimeout(10)
s.connect((node_ip, int(port)))
# "balanceget" takes the address as a second message and returns a tuple:
# (balance, credit, debit, fees, rewards, balance_no_mempool)
connections.send(s, "balanceget", 10)
connections.send(s, address, 10)
balanceget_result = connections.receive(s, 10)
print("Address balance: {}".format(balanceget_result[0]))
print("Address credit: {}".format(balanceget_result[1]))
print("Address debit: {}".format(balanceget_result[2]))
print("Address fees: {}".format(balanceget_result[3]))
print("Address rewards: {}".format(balanceget_result[4]))
print("Address balance without mempool: {}".format(balanceget_result[5]))
# get balance
def refresh():
    """Refresh the wallet GUI: balance, fee estimate, difficulty and sync status.

    Tries the connected node first ("light mode"); on any failure falls back
    to computing the same figures from the local DB cursors `c`/`m`
    (ledger / mempool). Updates the module-level Tk StringVars and finally
    redraws the transaction table.
    """
    global balance
    # print "refresh triggered"
    try:
        # light mode: ask the node for the balance breakdown and last block
        s = socks.socksocket()
        s.connect((node_ip_conf, int(port)))
        connections.send(s, "balanceget", 10)
        connections.send(s, address, 10)  # change address here to view other people's transactions
        stats_account = connections.receive(s, 10)
        balance = stats_account[0]
        credit = stats_account[1]
        debit = stats_account[2]
        fees = stats_account[3]
        rewards = stats_account[4]
        app_log.warning("Transaction address balance: {}".format(balance))
        connections.send(s, "blocklast", 10)
        block_get = connections.receive(s, 10)
        bl_height = block_get[0]
        db_timestamp_last = block_get[1]
        s.close()
    except:  # get locally
        app_log.warning("Unable to start in light mode, using local db for balance calculation")
        # global balance
        # print "refresh triggered"
        # pending outgoing mempool amount, padded with 0.1% + 0.01 per tx
        # as a conservative fee estimate
        m.execute("SELECT count(amount), sum(amount) FROM transactions WHERE address = ?;", (address,))
        result = m.fetchall()[0]
        if result[1] != None:
            debit_mempool = float(result[1]) + float(result[1]) * 0.001 + int(result[0]) * 0.01
        else:
            debit_mempool = 0
        c.execute("SELECT sum(amount) FROM transactions WHERE recipient = ?;", (address,))
        credit = c.fetchone()[0]
        c.execute("SELECT sum(amount) FROM transactions WHERE address = ?;", (address,))
        debit = c.fetchone()[0]
        c.execute("SELECT sum(fee) FROM transactions WHERE address = ?;", (address,))
        fees = c.fetchone()[0]
        c.execute("SELECT sum(reward) FROM transactions WHERE address = ?;", (address,))
        rewards = c.fetchone()[0]
        c.execute("SELECT MAX(block_height) FROM transactions")
        bl_height = c.fetchone()[0]
        # SUM() returns NULL (None) for an address with no matching rows
        debit = 0 if debit is None else float('%.8f' % debit)
        fees = 0 if fees is None else float('%.8f' % fees)
        rewards = 0 if rewards is None else float('%.8f' % rewards)
        credit = 0 if credit is None else float('%.8f' % credit)
        balance = '%.8f' % (credit - debit - fees + rewards - debit_mempool)
        app_log.warning("Node: Transction address balance: {}".format(balance))
        # calculate diff: latest mined (reward != 0) tx gives the last block time
        c.execute("SELECT * FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;")  # or it takes the first
        result = c.fetchall()
        db_timestamp_last = float(result[0][1])
        # print db_timestamp_last
        db_block_height = result[0][0]
        # print timestamp_avg
    try:
        # fee preview for the message currently typed in the openfield widget
        if encode_var.get() == 1:
            openfield_input = base64.b64encode(str(openfield.get("1.0", END).strip()))
        else:
            openfield_input = str(openfield.get("1.0", END)).strip()
        fee = '%.8f' % float(0.01 + (float(len(openfield_input)) / 100000) + int(keep_var.get()))  # 0.01 dust
        app_log.warning("Fee: {}".format(fee))
    except Exception as e:
        fee = 0.01
        app_log.warning("Fee error: {}".format(e))
    # calculate fee
    # check difficulty: node first, local fallback (same pattern as balance)
    try:
        s = socks.socksocket()
        s.connect((node_ip_conf, int(port)))
        connections.send(s, "diffget", 10)
        diff = connections.receive(s, 10)
        s.close()
    except:  # get locally
        app_log.warning("Unable to start in light mode, using local db for difficulty calculation")
        diff = difficulty(c)
    # check difficulty
    diff_msg = diff[1]
    # network status: red if the last block is more than 5 minutes old
    time_now = str(time.time())
    last_block_ago = float(time_now) - float(db_timestamp_last)
    if last_block_ago > 300:
        sync_msg = "{}m behind".format((int(last_block_ago / 60)))
        sync_msg_label.config(fg='red')
    else:
        sync_msg = "Up to date\nLast block: {}s ago".format((int(last_block_ago)))
        sync_msg_label.config(fg='green')
    # network status
    # aliases
    # c.execute("SELECT openfield FROM transactions WHERE address = ? AND openfield LIKE ?;",(address,)+("alias="+'%',))
    # aliases = c.fetchall()
    # app_log.warning("Aliases: "+str(aliases))
    # aliases
    # fees_current_var.set("Current Fee: {}".format('%.8f' % float(fee)))
    # push everything to the bound Tk variables
    balance_var.set("Balance: {}".format('%.8f' % float(balance)))
    debit_var.set("Spent Total: {}".format('%.8f' % float(debit)))
    credit_var.set("Received Total: {}".format('%.8f' % float(credit)))
    fees_var.set("Fees Paid: {}".format('%.8f' % float(fees)))
    rewards_var.set("Rewards: {}".format('%.8f' % float(rewards)))
    bl_height_var.set("Block Height: {}".format(bl_height))
    diff_msg_var.set("Mining Difficulty: {}".format('%.2f' % float(diff_msg)))
    sync_msg_var.set("Network: {}".format(sync_msg))
    table()
def send_confirm(amount_input, recipient_input, keep_input, openfield_input):
    """Open a confirmation dialog for a transaction before sending it.

    Transforms `openfield_input` according to the GUI checkboxes, in this
    order: encrypt (hybrid RSA-OAEP + AES-EAX with the recipient's public key
    fetched from the node), then base64-encode, then prepend the matching
    prefix ("bmsg="/"msg=", and finally "enc=" outermost). Shows amount, fee
    and the final openfield, with Confirm wired to send_confirmed().

    :param amount_input: amount to send (string or number)
    :param recipient_input: recipient address
    :param keep_input: keep-entry flag shown in the dialog
    :param openfield_input: raw openfield/message text
    """
    top10 = Toplevel()
    top10.title("Confirm")
    # encr check
    if encrypt_var.get() == 1:
        # get recipient's public key from the node
        connections.send(s, "pubkeyget", 10)
        connections.send(s, recipient_input, 10)
        target_public_key_hashed = connections.receive(s, 10)
        recipient_key = RSA.importKey(
            base64.b64decode(target_public_key_hashed).decode("utf-8"))
        # openfield_input = str(target_public_key.encrypt(openfield_input.encode("utf-8"), 32))
        data = openfield_input.encode("utf-8")
        # print (open("pubkey.der").read())
        # hybrid scheme: random AES session key, RSA-encrypted for the recipient
        session_key = get_random_bytes(16)
        cipher_aes = AES.new(session_key, AES.MODE_EAX)
        # Encrypt the session key with the public RSA key
        cipher_rsa = PKCS1_OAEP.new(recipient_key)
        # Encrypt the data with the AES session key
        ciphertext, tag = cipher_aes.encrypt_and_digest(data)
        enc_session_key = (cipher_rsa.encrypt(session_key))
        # serialize (nonce, tag, ciphertext, enc_session_key) as a literal string
        openfield_input = str(
            [x for x in (cipher_aes.nonce, tag, ciphertext, enc_session_key)])
    # encr check
    # msg check
    if encode_var.get() == 1:
        openfield_input = base64.b64encode(
            openfield_input.encode("utf-8")).decode("utf-8")
    # msg check
    # prefix order matters: "bmsg="/"msg=" first, "enc=" outermost
    if msg_var.get() == 1 and encode_var.get() == 1:
        openfield_input = "bmsg=" + openfield_input
    if msg_var.get() == 1 and encode_var.get() == 0:
        openfield_input = "msg=" + openfield_input
    if encrypt_var.get() == 1:
        openfield_input = "enc=" + str(openfield_input)
    fee = fee_calculate(openfield_input, keep_var.get())
    confirmation_dialog = Text(top10, width=100)
    confirmation_dialog.insert(INSERT, (
        "Amount: {}\nFee: {}\nTotal: {}\nTo: {}\nKeep Entry: {}\nOpenField:\n\n{}"
        .format(amount_input, fee,
                '%.8f' % (float(amount_input) + float(fee)), recipient_input,
                keep_input, openfield_input)))
    confirmation_dialog.grid(row=0, pady=0)
    enter = Button(
        top10,
        text="Confirm",
        command=lambda: send_confirmed(amount_input, recipient_input,
                                       keep_input, openfield_input, top10))
    enter.grid(row=1, column=0, sticky=W + E, padx=15, pady=(5, 5))
    done = Button(top10, text="Cancel", command=top10.destroy)
    done.grid(row=2, column=0, sticky=W + E, padx=15, pady=(5, 5))