def create_block():
    with open(parentdir + "/core" + "/key.pem") as f:
        key = RSA.importKey(f.read())

    # First transaction for the first (genesis) block
    trans1 = {
        "vin": [None],  # coinbase-style: no inputs
        "vout": [{
            "amount": 1000000.0,
            "address": key.publickey().exportKey().decode()
        }]
    }
    trans1["txid"] = sha(json.dumps(trans1))

    block1 = {
        "header": {
            "prev_block_hash": None,
            "merkle_root": None,
            "timestamp": None,
            "nonce": None,
            "difficulty_target": "0"
        },
        "transactions": [trans1]
    }

    utxo = UTXO()
    blockdb = Blockdb()
    blockdb.add_block(block1)
    utxo.add_trans(block1['transactions'], sha(json.dumps(block1['header'])))
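# The sha() helper used above (and throughout these snippets) is defined elsewhere
# in the repo and is not shown here. A minimal sketch of what it is assumed to do
# (hex SHA-256 digest of a string) is included purely for reference; the name
# sha_sketch is hypothetical.
import hashlib

def sha_sketch(data):
    """Assumed behaviour of the repo's sha() helper: hex SHA-256 of a string."""
    return hashlib.sha256(data.encode()).hexdigest()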
def get_block(self, conn):
    blockdb = Blockdb()
    primary_key = int(conn.recv(1024).decode())
    blocks = blockdb.get_from(primary_key)
    if blocks is None:  # guard in case get_from returns nothing
        blocks = []
    conn.send(str(len(blocks)).encode())
    if len(blocks) == 0:
        return None
    path = os.path.dirname(os.getcwd())
    for block in blocks:
        filename = block[-1]  # filename column of the block row
        file = path + "/core/" + filename + ".json"
        # send the size of the block file, then its contents
        size = str(os.path.getsize(file))
        conn.send(size.encode())
        with open(file) as f:
            data = f.read()
        conn.sendall(data.encode())
def new_block(self, conn, block):
    utxo = UTXO()
    blockdb = Blockdb()
    # check if the block is already in the local chain
    exists = blockdb.get_block_by_hash(sha(json.dumps(block)))
    if exists is not None:
        return None
    # ensure the block is valid
    if not self.verify("block", block):
        return None
    logging.debug("New block verified")
    # remove all transactions in the block from the unconfirmed pool
    self.pool.check_new_block(sha(json.dumps(block)))
    # add all transaction outputs to the UTXO set
    utxo.add_trans(block['transactions'], sha(json.dumps(block)))
    # remove all spent inputs from the UTXO set
    utxo.remove_trans(block['transactions'])
    # save the block in Blockdb
    blockdb.add_block(block)
    # propagate the block to peers (prop_block serialises the dict itself)
    self.client.prop_block(block)
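# new_block above relies on self.pool, self.client, and self.verify, whose setup is
# not shown in these snippets. They are assumed to be wired up in the enclosing
# server class's __init__ roughly along these lines (names and wiring are
# assumptions, not confirmed by the repo):
#
#     def __init__(self, pool, client):
#         self.pool = pool        # unconfirmed-transaction pool
#         self.client = client    # Client instance used to propagate blocks onward
#         self.verify = ...       # assumed dispatcher over the Verify class shown below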
class Client():
    """
    Client should initiate all data transfer
        - propagate new transactions
        - propagate new blocks
        - request recent blocks
        - request IPs
    """

    PORT = 5050
    DISCONNECT_MESSAGE = "DISCONNECT"

    def __init__(self):
        self.utxo = UTXO()
        self.blockdb = Blockdb()
        self.connections = {}  # {ip : conn}
        with open(parentdir + "/network" + "/addr.json") as file:
            self.ips = json.load(file)
        self.startup_connect()

    def startup_connect(self):
        """
        Called when a node first enters or re-enters the network: it queries its
        database of known nodes and establishes a connection to each one
        """
        for ip in list(self.ips.keys()):  # copy, so failed_conn can delete entries
            # a new IPv4/TCP socket is needed per peer, so it is created inside the loop
            client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                client.connect((ip, self.PORT))
                self.connections[ip] = client
                print("Client connected to :", ip)
            except OSError:  # the peer is down or otherwise unreachable
                self.failed_conn(ip)

    def connect_to_ip(self, ip):
        """
        When a new node on the network connects to the local node, this method
        establishes the two-way connection back to it
        """
        if ip in self.connections.keys():
            return None
        client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            client.connect((ip, self.PORT))
            self.connections[ip] = client
        except OSError:
            self.failed_conn(ip)
        else:
            self.add_ip(ip)

    def add_ip(self, ip):
        if ip not in self.ips.keys():
            self.ips[ip] = 0

    def failed_conn(self, ip):
        """
        Called when a node can't be reached; after three failed attempts the
        node is dropped from the list of known peers
        """
        if self.ips[ip] < 3:
            self.ips[ip] += 1
        else:
            del self.ips[ip]

    def prop_trans(self, trans):
        """
        trans : dict
            dictionary containing transaction data
        """
        trans = json.dumps(trans)
        for conn in self.connections.values():
            conn.send("NEW_TRANS".encode())
            _ = conn.recv(1024).decode()
            trans_encoded = trans.encode()
            trans_size = str(len(trans_encoded))
            conn.send(trans_size.encode())
            _ = conn.recv(1024).decode()
            conn.sendall(trans_encoded)  ## RETURNS NONE IF SUCCESSFUL, RAISES AN ERROR OTHERWISE; ADD ERROR HANDLING
            print("Transaction Sent")

    def prop_block(self, block):
        block = json.dumps(block)
        for conn in self.connections.values():
            conn.send("NEW_BLOCK".encode())
            block_encoded = block.encode()
            block_size = str(len(block_encoded))
            conn.send(block_size.encode())
            conn.sendall(block_encoded)
            print("Block Sent")

    def req_chain(self):
        longest = (None, 0)
        for conn in self.connections.values():
            conn.send("GET_CHAIN_LEN".encode())
            chain_len = int(conn.recv(1024).decode())
            if chain_len > longest[1]:
                longest = (conn, chain_len)
        latest = self.blockdb.get_latest()
        # in case the client can't connect to any nodes
        if longest[0] is None:
            print("No nodes are currently available")
        elif longest[1] <= latest:
            print("Blockchain is up to date")
        else:
            # once the longest chain is found, request the missing blocks
            self.req_block(longest[0])

    def req_block(self, conn):
        """
        conn : socket object
            the connection with the longest chain

        Send the number of the latest local block; the node sends back the number
        of missing blocks followed by each block, and this node extends its chain
        using the longest-chain method
        """
        latest = self.blockdb.get_latest()
        conn.send("GET_BLOCKS".encode())
        conn.send(str(latest).encode())
        num_blocks = int(conn.recv(1024).decode())
        block_num = 1
        while block_num <= num_blocks:
            block_size = int(conn.recv(1024).decode())
            block = conn.recv(1024)
            while len(block) < block_size:
                block += conn.recv(1024)
            block = json.loads(block.decode())
            self.utxo.add_trans(block['transactions'], sha(json.dumps(block)))
            self.blockdb.add_block(block)  ## DOES NOT VERIFY THE BLOCK
            block_num += 1
        print("Blockchain Updated")

    def req_node(self):
        for conn in self.connections.values():
            conn.send("GET_NODES".encode())
            file_size = int(conn.recv(1024).decode())
            ## HAVE TO RECEIVE LOTS OF DATA
            data = conn.recv(1024)
            while len(data) < file_size:
                data += conn.recv(1024)
            data = json.loads(data.decode())
            for key in data.keys():
                if key not in self.ips.keys():
                    self.ips[key] = 0

    def close(self):
        for conn in self.connections.values():
            conn.send(self.DISCONNECT_MESSAGE.encode())
            conn.close()
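# req_block and req_node above both repeat the same receive pattern: read a byte
# count, then loop on recv() until that many bytes have arrived. A small helper
# like the sketch below could factor that out; the name recv_exact is an
# assumption, not something these snippets define.
def recv_exact(conn, size, bufsize=1024):
    """Hypothetical helper: keep reading from conn until `size` bytes are collected."""
    data = b""
    while len(data) < size:
        data += conn.recv(bufsize)
    return data

# e.g. block = json.loads(recv_exact(conn, block_size).decode())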
class Verify():
    def __init__(self, pool):
        self.pool = pool
        self.utxo = UTXO()
        self.blockdb = Blockdb()
        self.path = parentdir + "/blocks"

    def get_block_num(self):
        """
        separate method because the latest block number is always changing
        """
        return self.blockdb.get_latest()

    def verify_block(self, block):
        """
        Valid block:
            - valid transactions
            - valid proof of work
            - valid merkle root
            - valid hash of previous block
        """
        root = self.verify_root(block)
        proof_work = self.verify_pow(block['header'])
        prev_hash = self.verify_prev_block(block['header'])
        for trans in block['transactions']:
            if not self.verify_trans(trans):
                return False
        return root and proof_work and prev_hash

    def verify_root(self, block):
        given_root = block['header']['merkle_root']
        # computed_root = MerkleTree(block['transactions'])  # block['transactions'] should be a list
        computed_root = "1"  # placeholder until the merkle root is actually recomputed
        return given_root == computed_root

    def verify_pow(self, header):
        diff = header["difficulty_target"]
        hashed = sha(json.dumps(header))
        return hashed[:len(diff)] == diff

    def verify_prev_block(self, header):
        block_num = self.get_block_num()
        with open(self.path + f"/block_{block_num}.json") as f:
            block = json.load(f)
        hashed = sha(json.dumps(block['header']))
        return hashed == header['prev_block_hash']

    def verify_trans(self, trans):
        """
        Valid transaction:
            - valid hash (txid)
            - input amount >= output amount
            - valid signatures
            - transaction is not double spent
        """
        v_hash = self.verify_hash(trans)
        v_sig = self.verify_sig(trans)
        v_bal = self.verify_balance(trans)
        v_double = self.double_spend(trans)
        return v_hash and v_sig and v_bal and v_double

    def get_prev_trans(self, txid):
        """
        txid : string
            hash of the transaction to be retrieved
        """
        block_hash = self.utxo.get_by_txid(txid)[-1]
        filename = self.blockdb.get_block_by_hash(block_hash)[-1]
        with open(self.path + f"/{filename}") as f:
            trans = json.load(f)['transactions']
        for t in trans:
            if t['txid'] == txid:
                break
        return t

    def verify_hash(self, trans):
        """
        trans : dict
            transaction data structure
        returns : boolean
            True if txid matches the hash of the transaction
        """
        tcopy = trans.copy()
        del tcopy['txid']
        hashed = sha(json.dumps(tcopy))
        return trans['txid'] == hashed

    def verify_sig(self, trans):
        """
        trans : dict
            transaction data structure
        returns : boolean
            True if all signatures match, False otherwise
        """
        for t in trans['vin']:
            sig = t['sig']
            pk = self.utxo.get_by_txid(t['txid'])[2]  # (id, txid, address, change, amount, block)
            pk = RSA.importKey(pk)
            if not pk.verify(bytes.fromhex(t['txid']), sig):
                return False
        return True

    def verify_balance(self, trans):
        """
        trans : dict
            transaction data structure
        returns : boolean
            True if input amount >= output amount, False otherwise
        """
        in_amount = 0
        out_amount = 0
        for i in range(len(trans['vout'])):
            out_amount += trans['vout'][i]['amount']
        for t in trans['vin']:
            prev_trans = self.get_prev_trans(t['txid'])
            change = t['change']
            prev_out = prev_trans['vout'][change]
            in_amount += prev_out['amount']
        return in_amount >= out_amount

    def double_spend(self, trans):
        """
        - check all blocks from the "input" transaction onward, ensuring that no
          other transaction already spends the same inputs
        - check the current pool to ensure no other transaction has the same input

        complexity: O(m) + O(n), m --> # of inputs, n --> # of transactions to check
        """
        # check the block chain
        txids, start = [], []
        for t in trans['vin']:  # vin is a list of dictionaries
            txids.append(t['txid'])
            block_hash = self.utxo.get_by_txid(t['txid'])[-1]
            block_num = self.blockdb.get_block_by_hash(block_hash)[0]
            start.append(block_num)
        end_block = self.get_block_num()
        for block_num in range(min(start), end_block + 1):
            if block_num == 1:
                continue  # the genesis block is skipped
            with open(self.path + f"/block_{block_num}.json") as f:
                block_trans = json.load(f)['transactions']
            for t in block_trans:
                check = [i['txid'] for i in t['vin']]
                if any(ids in check for ids in txids):
                    return False
        # check the pool
        for txid in txids:
            if self.pool.check_in_pool(txid):
                return False
        return True
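# The commented-out MerkleTree call in verify_root suggests the real root should be
# recomputed from the block's transactions; that class is defined elsewhere in the
# repo. The sketch below shows one way such a root could be computed (pairwise
# hashing of txids with the repo's sha() helper, duplicating the last hash on
# odd-sized levels). The function name and exact hashing scheme are assumptions.
def merkle_root_sketch(transactions):
    """Hypothetical helper: fold txids pairwise into a single root hash."""
    level = [t["txid"] for t in transactions]
    if not level:
        return None
    while len(level) > 1:
        if len(level) % 2 == 1:
            level.append(level[-1])  # duplicate the last hash on odd-sized levels
        level = [sha(level[i] + level[i + 1]) for i in range(0, len(level), 2)]
    return level[0]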
def get_chain_len(self, conn):
    blockdb = Blockdb()
    latest = blockdb.get_latest()
    conn.send(str(latest).encode())
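# None of the snippets above show how the server routes incoming commands to
# get_block, new_block, and get_chain_len. The sketch below is one way the
# per-connection loop could look, matching the messages the Client sends; the
# method name handle_conn_sketch and the overall wiring are assumptions.
def handle_conn_sketch(self, conn):
    """Hypothetical dispatch loop: read a command, then call the matching handler."""
    while True:
        command = conn.recv(1024).decode()
        if command == "GET_CHAIN_LEN":
            self.get_chain_len(conn)
        elif command == "GET_BLOCKS":
            self.get_block(conn)
        elif command == "NEW_BLOCK":
            size = int(conn.recv(1024).decode())
            payload = conn.recv(1024)
            while len(payload) < size:
                payload += conn.recv(1024)
            self.new_block(conn, json.loads(payload.decode()))
        elif command == "DISCONNECT":
            conn.close()
            break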