def transaction_validate():
    """Validates all transaction elements. Raise a ValueError exception on error."""
    # Cheap sanity checks come first so invalid transactions exit early.
    # 1/ Timestamp window: reject future txs and txs older than a day.
    if tx.start_time_tx < tx.q_received_timestamp:
        drift_minutes = quantize_two((tx.q_received_timestamp - tx.start_time_tx) / 60)
        raise ValueError(f"Future transaction not allowed, timestamp "
                         f"{drift_minutes} minutes in the future")
    if node.last_block_timestamp - 86400 > tx.q_received_timestamp:
        raise ValueError("Transaction older than 24h not allowed.")
    # 2/ No negative amounts.
    if float(tx.received_amount) < 0:
        raise ValueError("Negative balance spend attempt")
    # 3/ Both addresses must be well-formed.
    if not essentials.address_validate(tx.received_address):
        raise ValueError("Not a valid sender address")
    if not essentials.address_validate(tx.received_recipient):
        raise ValueError("Not a valid recipient address")
    # CPU-heavier part: rebuild the exact signed buffer and verify the signature.
    signed_parts = (tx.received_timestamp, tx.received_address, tx.received_recipient,
                    tx.received_amount, tx.received_operation, tx.received_openfield)
    buffer = str(signed_parts).encode("utf-8")
    # Will raise if error - also includes reconstruction of address from pubkey to make sure it matches
    SignerFactory.verify_bis_signature(tx.received_signature_enc,
                                       tx.received_public_key_b64encoded,
                                       buffer, tx.received_address)
    node.logger.app_log.info(f"Valid signature from {tx.received_address} "
                             f"to {tx.received_recipient} amount {tx.received_amount}")
def balance_differences():
    """Compare every address's balance between the two ledger cursors and report mismatches."""
    print("Selecting all addresses from full ledger for errors")
    tar_obj.h.execute(
        "SELECT distinct(recipient) FROM transactions group by recipient;")
    for row in tar_obj.h.fetchall():
        addr = row[0]
        first_balance = balance_from_cursor(tar_obj.h, addr)
        second_balance = balance_from_cursor(tar_obj.h2, addr)
        if first_balance == second_balance:
            check = ' Ok'
        else:
            check = '> Ko'
            tar_obj.errors += 1
        # Flag malformed addresses that nevertheless carry a non-zero balance.
        if not address_validate(addr) and (first_balance or second_balance) != 0:
            print(f"{addr} > wrong recipient")
        print(f"{check} {addr} {first_balance} {second_balance}")
        # A negative balance on either side is always an anomaly worth printing.
        if Decimal(first_balance) < 0 or Decimal(second_balance) < 0:
            print(addr, first_balance, second_balance)
    print(f"Done, {tar_obj.errors} errors.")
def transaction_validate():
    """Validates all transaction elements. Raise a ValueError exception on error."""
    # Begin with costless checks first, so we can early exit. Time of tx.
    if tx.start_time_tx < tx.q_received_timestamp:
        minutes_ahead = quantize_two((tx.q_received_timestamp - tx.start_time_tx) / 60)
        raise ValueError(f"Future transaction not allowed, timestamp {minutes_ahead} minutes in the future")
    if previous_block.q_timestamp_last - 86400 > tx.q_received_timestamp:
        raise ValueError("Transaction older than 24h not allowed.")
    # Amount must not be negative.
    if float(tx.received_amount) < 0:
        raise ValueError("Negative balance spend attempt")
    # Both addresses must be well-formed.
    if not essentials.address_validate(tx.received_address):
        raise ValueError("Not a valid sender address")
    if not essentials.address_validate(tx.received_recipient):
        raise ValueError("Not a valid recipient address")
    # CPU-heavier checks: validate the key format, then the signature itself.
    essentials.validate_pem(tx.received_public_key_hashed)
    # Extract the signature verifier from the b64-encoded public key.
    pubkey = RSA.importKey(base64.b64decode(tx.received_public_key_hashed))
    raw_signature = base64.b64decode(tx.received_signature_enc)
    sig_verifier = PKCS1_v1_5.new(pubkey)
    # Rebuild the exact buffer that was signed.
    signed_parts = (tx.received_timestamp, tx.received_address, tx.received_recipient,
                    tx.received_amount, tx.received_operation, tx.received_openfield)
    digest = SHA.new(str(signed_parts).encode("utf-8"))
    # Real sig check takes place here.
    if not sig_verifier.verify(digest, raw_signature):
        raise ValueError(f"Invalid signature from {tx.received_address}")
    node.logger.app_log.info(
        f"Valid signature from {tx.received_address} to {tx.received_recipient} amount {tx.received_amount}"
    )
    # Reconstruct address from pubkey to make sure it matches the claimed sender.
    if tx.received_address != hashlib.sha224(
            base64.b64decode(tx.received_public_key_hashed)).hexdigest():
        raise ValueError("Attempt to spend from a wrong address")
def transaction_validate():
    """Validates all transaction elements. Raise a ValueError exception on error.

    Checks are ordered cheapest-first so malformed transactions are rejected
    before any base64/RSA work is done — same ordering as the sibling
    transaction_validate implementations in this file. Previously this
    variant ran the expensive signature verification first, and called
    essentials.validate_pem AFTER RSA.importKey, defeating the format check.
    """
    # Costless checks first: timestamp window.
    if tx.start_time_tx < tx.q_received_timestamp:
        raise ValueError(
            f"Future transaction not allowed, timestamp {quantize_two((tx.q_received_timestamp - tx.start_time_tx) / 60)} minutes in the future"
        )
    if previous_block.q_timestamp_last - 86400 > tx.q_received_timestamp:
        raise ValueError("Transaction older than 24h not allowed.")
    # Amount sanity.
    if float(tx.received_amount) < 0:
        raise ValueError("Negative balance spend attempt")
    # Address validity.
    if not essentials.address_validate(tx.received_address):
        raise ValueError("Not a valid sender address")
    if not essentials.address_validate(tx.received_recipient):
        raise ValueError("Not a valid recipient address")
    # The sender address must be the sha224 of the supplied public key.
    if tx.received_address != hashlib.sha224(
            base64.b64decode(tx.received_public_key_hashed)).hexdigest():
        raise ValueError("Attempt to spend from a wrong address")
    # Validate the key format BEFORE importing it (fix: was called after importKey).
    essentials.validate_pem(tx.received_public_key_hashed)
    # Now the cpu-heavy signature check.
    received_public_key = RSA.importKey(
        base64.b64decode(tx.received_public_key_hashed))
    received_signature_dec = base64.b64decode(tx.received_signature_enc)
    verifier = PKCS1_v1_5.new(received_public_key)
    sha_hash = SHA.new(
        str((tx.received_timestamp, tx.received_address, tx.received_recipient,
             tx.received_amount, tx.received_operation,
             tx.received_openfield)).encode("utf-8"))
    if not verifier.verify(sha_hash, received_signature_dec):
        raise ValueError(f"Invalid signature from {tx.received_address}")
    node.logger.app_log.info(
        f"Valid signature from {tx.received_address} to {tx.received_recipient} amount {tx.received_amount}"
    )
def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False):
    """
    Checks and merge the tx list in out mempool
    :param data: list of tx lists (or a single tx list), or '*' on lost connection
    :param peer_ip: ip of the sending peer, used for freeze/throttle bookkeeping
    :param c: cursor over the main ledger db (read-only use here)
    :param size_bypass: if True, will merge whatever the mempool size is
    :param wait: if True, will wait until the main db_lock is free. if False, will just drop.
    :param revert: if True, we are reverting tx from digest_block, so main lock is on. Don't bother, process without lock.
    :return: a status string for empty input, else a list of status messages.
             NOTE(review): on the normal success path no explicit return is
             visible in this block, so the caller would get None — confirm
             against the full file before relying on the return value.
    """
    # Cheap rejections first: nothing to do, or sentinel for a dead connection.
    if not data:
        return "Mempool from {} was empty".format(peer_ip)
    mempool_result = []
    if data == '*':
        raise ValueError("Connection lost")
    # EAFP: unknown peers raise KeyError on peers_sent lookup and are simply allowed.
    try:
        if self.peers_sent[peer_ip] > time.time():
            self.app_log.warning(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            mempool_result.append(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            return mempool_result
    except:
        # unknown peer
        pass
    # Malformed payload: freeze this peer for 10 minutes and bail.
    if not essentials.is_sequence(data):
        with self.peers_lock:
            self.peers_sent[peer_ip] = time.time() + 10 * 60
        self.app_log.warning(
            "Freezing mempool from {} for 10 min - Bad TX format".format(
                peer_ip))
        mempool_result.append("Bad TX Format")
        return mempool_result
    mempool_result.append(
        "Mempool merging started from {}".format(peer_ip))
    if not revert:
        while self.db_lock.locked():  # prevent transactions which are just being digested from being added to mempool
            if not wait:
                # not reverting, but not waiting, bye
                # By default, we don't wait.
                mempool_result.append("Locked ledger, dropping txs")
                return mempool_result
            self.app_log.warning(
                "Waiting for block digestion to finish before merging mempool"
            )
            time.sleep(1)
    # if reverting, don't bother with main lock, go on.
    mempool_size = self.size()  # caulculate current mempool size before adding txs
    # TODO: we check main ledger db is not locked before beginning, but we don't lock? ok, see comment in node.py. since it's called from a lock, it would deadlock.
    # merge mempool
    #while self.lock.locked():
    #    time.sleep(1)
    with self.lock:
        try:
            block_list = data
            if not isinstance(
                    block_list[0], list
            ):  # convert to list of lists if only one tx and not handled
                block_list = [block_list]
            for transaction in block_list:
                # set means unique, only accepts list of txs
                # Size gate: smaller mempool quota for plain txs, progressively
                # larger quotas for txs with long openfields, big amounts, or
                # whitelisted senders.
                if (mempool_size < 0.3 or size_bypass) or \
                        (len(str(transaction[7])) > 200 and mempool_size < 0.4) \
                        or (Decimal(transaction[3]) > Decimal(5) and mempool_size < 0.5) \
                        or (transaction[1] in self.config.mempool_allowed and mempool_size < 0.6):
                    # condition 1: size limit or bypass,
                    # condition 2: spend more than 25 coins,
                    # condition 3: have length of openfield larger than 200
                    # all transactions in the mempool need to be cycled to check for special cases,
                    # therefore no while/break loop here
                    # Normalize/truncate every field to its max length before any use.
                    mempool_timestamp = '%.2f' % (quantize_two(transaction[0]))
                    mempool_address = str(transaction[1])[:56]
                    mempool_recipient = str(transaction[2])[:56]
                    mempool_amount = '%.8f' % (quantize_eight(transaction[3]))  # convert scientific notation
                    mempool_signature_enc = str(transaction[4])[:684]
                    mempool_public_key_hashed = str(transaction[5])[:1068]
                    mempool_operation = str(transaction[6])[:10]
                    mempool_openfield = str(transaction[7])[:100000]
                    # convert readable key to instance
                    mempool_public_key = RSA.importKey(
                        base64.b64decode(mempool_public_key_hashed))
                    mempool_signature_dec = base64.b64decode(
                        mempool_signature_enc)
                    acceptable = True
                    # Duplicate check against our own mempool.
                    try:
                        # TODO: sure it will throw an exception?
                        # condition 1)
                        dummy = self.fetchall(
                            "SELECT * FROM transactions WHERE signature = ?;",
                            (mempool_signature_enc, ))
                        if dummy:
                            # self.app_log.warning("That transaction is already in our mempool")
                            mempool_result.append(
                                "That transaction is already in our mempool")
                            acceptable = False
                            mempool_in = True
                        else:
                            mempool_in = False
                    except:
                        #print('sigmempool NO ', mempool_signature_enc)
                        mempool_in = False
                    # reject transactions which are already in the ledger
                    # TODO: not clean, will need to have ledger as a module too.
                    # dup code atm.
                    essentials.execute_param_c(
                        c, "SELECT * FROM transactions WHERE signature = ?;",
                        (mempool_signature_enc, ), self.app_log)
                    # condition 2
                    try:
                        dummy = c.fetchall()[0]
                        #print('sigledger', mempool_signature_enc, dummy)
                        if dummy:
                            mempool_result.append(
                                "That transaction is already in our ledger")
                            # self.app_log.warning("That transaction is already in our ledger")
                            # reject transactions which are already in the ledger
                            acceptable = False
                            ledger_in = True
                            # Can be a syncing node. Do not request mempool from this peer until 10 min
                            with self.peers_lock:
                                self.peers_sent[peer_ip] = time.time() + 10 * 60
                            self.app_log.warning(
                                "Freezing mempool from {} for 10 min.".format(peer_ip))
                            # NOTE(review): this aborts the whole batch on the
                            # first ledger-known tx, skipping later valid ones.
                            return mempool_result
                        else:
                            ledger_in = False
                    except:
                        #print('sigledger NO ', mempool_signature_enc)
                        ledger_in = False
                    # if mempool_operation != "1" and mempool_operation != "0":
                    #     mempool_result.append = ("Mempool: Wrong keep value {}".format(mempool_operation))
                    #     acceptable = 0
                    # Sender address must be the sha224 of the supplied pubkey.
                    if mempool_address != hashlib.sha224(
                            base64.b64decode(
                                mempool_public_key_hashed)).hexdigest():
                        mempool_result.append(
                            "Mempool: Attempt to spend from a wrong address")
                        # self.app_log.warning("Mempool: Attempt to spend from a wrong address")
                        acceptable = False
                    if not essentials.address_validate(
                            mempool_address) or not essentials.address_validate(
                                mempool_recipient):
                        mempool_result.append("Mempool: Not a valid address")
                        # self.app_log.warning("Mempool: Not a valid address")
                        acceptable = False
                    if quantize_eight(mempool_amount) < 0:
                        acceptable = False
                        mempool_result.append(
                            "Mempool: Negative balance spend attempt")
                        # self.app_log.warning("Mempool: Negative balance spend attempt")
                    if quantize_two(mempool_timestamp) > time.time() + drift_limit:
                        # dont accept future txs
                        acceptable = False
                    # dont accept old txs, mempool needs to be harsher than ledger
                    if quantize_two(mempool_timestamp) < time.time() - 82800:
                        # NOTE(review): uses 0 where the rest of the flow uses
                        # False — same truthiness, but inconsistent style.
                        acceptable = 0
                    # remove from mempool if it's in both ledger and mempool already
                    if mempool_in and ledger_in:
                        try:
                            # Do not lock, we already have the lock for the whole merge.
                            self.execute(SQL_DELETE_TX, (mempool_signature_enc, ))
                            self.commit()
                            mempool_result.append(
                                "Mempool: Transaction deleted from our mempool")
                        except:  # experimental try and except
                            mempool_result.append(
                                "Mempool: Transaction was not present in the pool anymore")
                            pass  # continue to mempool finished message
                    # verify signatures and balances
                    essentials.validate_pem(mempool_public_key_hashed)
                    # verify signature
                    verifier = PKCS1_v1_5.new(mempool_public_key)
                    my_hash = SHA.new(
                        str((mempool_timestamp, mempool_address,
                             mempool_recipient, mempool_amount,
                             mempool_operation,
                             mempool_openfield)).encode("utf-8"))
                    if not verifier.verify(my_hash, mempool_signature_dec):
                        acceptable = False
                        mempool_result.append(
                            "Mempool: Wrong signature in mempool insert attempt: {}"
                            .format(transaction))
                        # self.app_log.warning("Mempool: Wrong signature in mempool insert attempt")
                    # verify signature
                    if acceptable:
                        # verify balance
                        # mempool_result.append("Mempool: Verifying balance")
                        mempool_result.append(
                            "Mempool: Received address: {}".format(
                                mempool_address))
                        # include mempool fees
                        result = self.fetchall(
                            "SELECT amount, openfield FROM transactions WHERE address = ?;",
                            (mempool_address, ))
                        debit_mempool = 0
                        if result:
                            for x in result:
                                debit_tx = quantize_eight(x[0])
                                fee = quantize_eight(
                                    essentials.fee_calculate(x[1]))
                                debit_mempool = quantize_eight(
                                    debit_mempool + debit_tx + fee)
                        else:
                            debit_mempool = 0
                        # include the new block
                        # Sum everything ever received by this address in the ledger.
                        credit_ledger = Decimal("0")
                        for entry in essentials.execute_param_c(
                                c,
                                "SELECT amount FROM transactions WHERE recipient = ?;",
                                (mempool_address, ), self.app_log):
                            try:
                                credit_ledger = quantize_eight(
                                    credit_ledger) + quantize_eight(entry[0])
                                credit_ledger = 0 if credit_ledger is None else credit_ledger
                            except:
                                credit_ledger = 0
                        credit = credit_ledger
                        # Sum everything ever spent by this address in the ledger.
                        debit_ledger = Decimal("0")
                        for entry in essentials.execute_param_c(
                                c,
                                "SELECT amount FROM transactions WHERE address = ?;",
                                (mempool_address, ), self.app_log):
                            try:
                                debit_ledger = quantize_eight(
                                    debit_ledger) + quantize_eight(entry[0])
                                debit_ledger = 0 if debit_ledger is None else debit_ledger
                            except:
                                debit_ledger = 0
                        debit = debit_ledger + debit_mempool
                        # Accumulate fees paid and mining rewards received.
                        fees = Decimal("0")
                        for entry in essentials.execute_param_c(
                                c,
                                "SELECT fee FROM transactions WHERE address = ?;",
                                (mempool_address, ), self.app_log):
                            try:
                                fees = quantize_eight(fees) + quantize_eight(entry[0])
                                fees = 0 if fees is None else fees
                            except:
                                fees = 0
                        rewards = Decimal("0")
                        for entry in essentials.execute_param_c(
                                c,
                                "SELECT sum(reward) FROM transactions WHERE recipient = ?;",
                                (mempool_address, ), self.app_log):
                            try:
                                rewards = quantize_eight(
                                    rewards) + quantize_eight(entry[0])
                                rewards = 0 if rewards is None else rewards
                            except:
                                rewards = 0
                        # balance includes pending mempool debits; balance_pre is ledger-only.
                        balance = quantize_eight(
                            credit - debit - fees + rewards -
                            quantize_eight(mempool_amount))
                        balance_pre = quantize_eight(credit_ledger -
                                                     debit_ledger - fees +
                                                     rewards)
                        fee = essentials.fee_calculate(mempool_openfield)
                        time_now = time.time()
                        if quantize_two(mempool_timestamp) > quantize_two(
                                time_now) + drift_limit:
                            mempool_result.append(
                                "Mempool: Future transaction not allowed, timestamp {} minutes in the future"
                                .format(
                                    quantize_two(
                                        (quantize_two(mempool_timestamp) -
                                         quantize_two(time_now)) / 60)))
                            # self.app_log.warning("Mempool: Future transaction not allowed, timestamp {} minutes in the future.")
                        elif quantize_two(time_now) - 86400 > quantize_two(
                                mempool_timestamp):
                            mempool_result.append(
                                "Mempool: Transaction older than 24h not allowed."
                            )
                            # self.app_log.warning("Mempool: Transaction older than 24h not allowed.")
                        elif quantize_eight(mempool_amount) > quantize_eight(
                                balance_pre):
                            mempool_result.append(
                                "Mempool: Sending more than owned")
                            # self.app_log.warning("Mempool: Sending more than owned")
                        elif quantize_eight(balance) - quantize_eight(fee) < 0:
                            mempool_result.append(
                                "Mempool: Cannot afford to pay fees")
                            # self.app_log.warning("Mempool: Cannot afford to pay fees")
                        # verify signatures and balances
                        else:
                            # All checks passed: persist the tx into our mempool db.
                            self.execute(
                                "INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?)",
                                (str(mempool_timestamp), str(mempool_address),
                                 str(mempool_recipient), str(mempool_amount),
                                 str(mempool_signature_enc),
                                 str(mempool_public_key_hashed),
                                 str(mempool_operation),
                                 str(mempool_openfield)))
                            mempool_result.append(
                                "Mempool updated with a received transaction from {}"
                                .format(peer_ip))
                            self.commit()  # Save (commit) the changes
                            # Track growth so the size gate above stays accurate.
                            mempool_size = mempool_size + sys.getsizeof(
                                str(transaction)) / 1000000.0
                else:
                    mempool_result.append(
                        "Local mempool is already full for this tx type, skipping merging"
                    )
                    # self.app_log.warning("Local mempool is already full for this tx type, skipping merging")
                    return mempool_result  # avoid spamming of the logs
            # TODO: Here maybe commit() on c to release the write lock?
        except Exception as e:
            self.app_log.warning("Mempool: Error processing: {} {}".format(
                data, e))
            if self.config.debug_conf == 1:
                raise
            # NOTE(review): returns a (exception, messages) tuple here, unlike
            # every other path which returns a plain list — confirm callers
            # handle both shapes.
            try:
                return e, mempool_result
            except:
                return mempool_result
def go(match, iterator, coordinator, league_requirement=0):
    """Replay one on-chain game for the tx described by `match`.

    Builds a Game from the ledger row `match`, then walks the chain block by
    block, turning transactions into game events (items, enemies, pvp, combat)
    until the hero dies or the chain runs out.

    :param match: a ledger row; indexes used here: 0=block, 2=sender,
                  3=recipient, 4=amount, 11=openfield ("league:recipient" or league)
    :param iterator: pass number; 2 means final pass — replay is saved to disk
    :param coordinator: expected game coordinator address
    :param league_requirement: minimum bet for a non-casual league game
    :return: (game, hero) tuple with the final state
    """
    game = classes.Game()

    def game_saved():
        # True when a scores row for this game hash is flagged saved=1.
        # EAFP: fetchone() returns None when no row, and [0] raises -> False.
        try:
            scores_db.c.execute(
                "SELECT * FROM scores WHERE hash = ? AND saved = ? ", (
                    game.hash,
                    1,
                ))
            result = scores_db.c.fetchone()[0]
            return True
        except:
            return False

    def game_finished():
        # True when a scores row for this game hash is flagged finished=1.
        try:
            scores_db.c.execute(
                "SELECT * FROM scores WHERE hash = ? AND finished = ? ", (
                    game.hash,
                    1,
                ))
            result = scores_db.c.fetchone()[0]
            return True
        except:
            return False

    def db_output():
        # Snapshot the hero's gear names; missing gear (None attr) becomes None.
        try:
            output_weapon = hero.weapon.name
        except:
            output_weapon = None
        try:
            output_armor = hero.armor.name
        except:
            output_armor = None
        try:
            output_ring = hero.ring.name
        except:
            output_ring = None
        # Replace (delete + insert) the scores row unless the game is finished.
        if not game_finished():
            scores_db.c.execute("DELETE FROM scores WHERE hash = ?",
                                (game.hash, ))
            scores_db.c.execute(
                "INSERT INTO scores VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
                (game.properties["block"], game.hash, game.seed,
                 hero.experience,
                 json.dumps({
                     "weapon": output_weapon,
                     "armor": output_armor,
                     "ring": output_ring
                 }), game.league, game.bet, json.dumps(hero.damage_table),
                 json.dumps(hero.defense_table), game.current_block,
                 game.finished, game.saved))
            scores_db.conn.commit()

    def output(entry):
        # Append one line to the game story and echo it to stdout.
        game.step += 1
        print(entry)
        game.story[game.step] = entry

    def replay_save():
        # Persist the story JSON and flip the saved/finished flags in the db.
        if not os.path.exists("static"):
            os.mkdir("static")
        if not os.path.exists("static/replays"):
            os.mkdir("static/replays")
        if not game_saved() or not game_finished():
            with open(game.filename, "w") as file:
                file.write(json.dumps(game.story))
            scores_db.c.execute("UPDATE scores SET saved = 1 WHERE hash = ?",
                                (game.hash, ))
            scores_db.conn.commit()
            game.saved = True
            if not hero.alive:
                game.finished = True
                scores_db.c.execute(
                    "UPDATE scores SET finished = 1 WHERE hash = ?",
                    (game.hash, ))
                scores_db.conn.commit()

    # Parse openfield "league:recipient"; fall back to the tx sender on any
    # failure (missing colon, invalid address).
    try:
        assert ":" in match[11]
        recipient = match[11].split(":")[1]
        league = match[11].split(":")[0]
    except:
        recipient = match[2]
        league = match[11]
    try:
        assert essentials.address_validate(recipient)
    except:
        recipient = match[2]
    game.properties = {
        "seed": recipient,
        "block": match[0],
        "recipient": match[3],
        "amount": match[4],
        "league": league
    }
    game.start_block = game.properties["block"]
    game.recipient = game.properties["recipient"]
    game.bet = game.properties["amount"]
    game.current_block = game.start_block
    game.seed = game.properties["seed"]
    # Deterministic short game id from seed + starting block.
    game.hash = blake2b(
        (game.properties["seed"] + str(game.properties["block"])).encode(),
        digest_size=10).hexdigest()
    # NOTE(review): self-assignment no-op — presumably a leftover; confirm
    # before removing.
    game.enemies = game.enemies
    # League games require the coordinator as recipient and a big enough bet.
    if game.recipient == coordinator and game.bet >= league_requirement:
        game.league = game.properties["league"]
    else:
        game.league = "casual"
    game.filename = "static/replays/" + str(game.hash + ".json")
    hero = classes.Hero()
    if game_finished():
        game.quit = True
        print(f"Game {game.hash} tagged as finished, skipping")
    #trigger is followed by events affected by modifiers
    #define events
    # Combat events keyed by 2-char substrings of the seed.
    EVENTS = {
        game.properties["seed"][2:4]: "attack",
        game.properties["seed"][4:6]: "attacked",
        game.properties["seed"][6:8]: "attack_critical"
    }

    #define triggers
    def enemy_dead_check():
        # Ends combat when the current enemy drops below 1 HP.
        if enemy.health < 1:
            hero.in_combat = False
            enemy.alive = False
            output(f"{enemy.name} died")
            output(f"You now have {hero.experience} experience")

    def hero_dead_check():
        if hero.health < 1:
            hero.alive = False
            output(f"You died with {hero.experience} experience")

    def chaos_ring():
        # Grant a good or bad chaos ring based on the cycle hash first char;
        # reads `subcycle` from the enclosing loop via closure.
        if not hero.ring:
            output(
                f'You see a chaos ring, the engraving says {subcycle["cycle_hash"][0:5]}'
            )
            if subcycle["cycle_hash"][0] in ["0", "1", "2", "3", "4"]:
                hero.ring = classes.ChaosRing().roll_good()
            else:
                hero.ring = classes.ChaosRing().roll_bad()
            hero.full_hp += hero.ring.health_modifier
            if hero.health > hero.full_hp:
                hero.health = hero.full_hp
            output(hero.ring.string)

    def ragnarok():
        output(f"Ragnarök begins")
        # add new monsters to the world
        for enemy in classes.Game().enemies_ragnarok:
            game.enemies.append(enemy)

    def attack():
        hero.experience += 1
        damage = hero.damage
        enemy.health -= hero.damage
        output(
            f"{enemy.name} suffers {damage} damage and is left with {enemy.health} HP"
        )
        enemy_dead_check()

    def attack_critical():
        # Critical hits scale with accumulated experience.
        hero.experience += 1
        damage = hero.damage + hero.experience
        enemy.health -= damage
        output(
            f"{enemy.name} suffers {damage} *critical* damage and is left with {enemy.health} HP"
        )
        enemy_dead_check()

    def cycle():
        # Load all txs of the current block into game.cycle, keyed by position.
        db.c.execute(
            "SELECT * FROM transactions WHERE block_height = ? ORDER BY block_height",
            (game.current_block, ))
        result = db.c.fetchall()
        position = 0
        game.cycle = {}  #remove previous cycle if exists
        for tx in result:
            position = position + 1
            block_height = tx[0]
            timestamp = tx[1]
            address = tx[2]
            recipient = tx[3]
            amount = tx[4]
            block_hash = tx[7]
            operation = tx[10]
            data = tx[11]
            # Per-tx entropy source for triggers/events.
            cycle_hash = blake2b((str(tx)).encode(),
                                 digest_size=60).hexdigest()
            game.cycle[position] = {
                "block_height": block_height,
                "timestamp": timestamp,
                "address": address,
                "recipient": recipient,
                # NOTE(review): key ":amount" looks like a typo for "amount";
                # nothing in this function reads it back — confirm before fixing.
                ":amount": amount,
                "block_hash": block_hash,
                "operation": operation,
                "data": data,
                "cycle_hash": cycle_hash
            }

    def attacked():
        # Enemy strikes the hero; armor reduces the damage taken (but the
        # printed number is the enemy's raw damage).
        damage_taken = enemy.damage
        if hero.armor:
            damage_taken -= hero.defense
        hero.health = hero.health - damage_taken
        output(
            f"{enemy.name} hits you for {enemy.damage} HP, you now have {hero.health} HP"
        )
        hero_dead_check()

    # Main replay loop: one iteration per block until death, quit, or chain end.
    while hero.alive and not game.quit:
        cycle()
        if not game.cycle:
            # Empty block list means we caught up with the chain tip.
            output("The game is still running")
            game.quit = True
            break
        for subposition, subcycle in game.cycle.items():
            #for tx in block
            #print (subcycle)
            # human interaction
            for item_interactive_class in classes.items_interactive:
                if item_interactive_class(
                ).trigger == subcycle["data"] and subcycle[
                        "address"] == game.seed and subcycle[
                            "operation"] == game.interaction_string:
                    if item_interactive_class == classes.ChaosRing:
                        chaos_ring()
            for events_interactive_global_class in classes.events_interactive_global:
                if events_interactive_global_class(
                ).trigger == subcycle["data"] and subcycle[
                        "operation"] == game.interaction_string:
                    if events_interactive_global_class == classes.Ragnarok:
                        ragnarok()
            # human interaction
            if iterator == 2:
                # PvP only resolves on the final pass, when other players'
                # damage tables are already in the scores db.
                for pvp_class in game.pvp:
                    if pvp_class().trigger == subcycle["data"] and subcycle[
                            "operation"] == game.interaction_string and subcycle[
                                "recipient"] == game.seed and hero.pvp_interactions > 0:
                        attacker = subcycle["address"]
                        try:
                            scores_db.c.execute(
                                "SELECT damage FROM scores WHERE seed = ? AND block_start <= ? AND block_end >= ? ORDER BY block_start DESC LIMIT 1",
                                (
                                    attacker,
                                    game.current_block,
                                    game.current_block,
                                ))
                            enemy_damage_table = json.loads(
                                scores_db.c.fetchone()[0])
                            # Use the attacker's most recent damage value at
                            # or before the current block.
                            for enemy_damage_block, enemy_damage_value in enemy_damage_table.items():
                                if int(enemy_damage_block) <= game.current_block:
                                    enemy_damage = int(enemy_damage_value)
                            hero.health = hero.health - (enemy_damage -
                                                         hero.defense)
                            hero.pvp_interactions -= 1
                            hero_dead_check()
                            output(
                                f"Player {attacker} hits you and you lose {enemy_damage - hero.defense} health down to {hero.health}"
                            )
                        except Exception:
                            output(
                                f"Player {attacker} tried to attack you, but they failed"
                            )
            for potion_class in game.potions:
                if potion_class(
                ).trigger in subcycle["cycle_hash"] and not hero.in_combat:
                    if potion_class == classes.HealthPotion and hero.health < hero.full_hp:
                        # NOTE(review): outer condition already requires
                        # not in_combat, so the in-combat branch below looks
                        # unreachable — confirm before simplifying.
                        if hero.in_combat:
                            hero.health = hero.health + classes.HealthPotion(
                            ).heal_in_combat
                            output(
                                f"You drink a potion and heal to {hero.health} HP..."
                            )
                        elif not hero.in_combat:
                            hero.health = hero.health + classes.HealthPotion(
                            ).heal_not_in_combat
                            output(
                                f"You rest and heal well to {hero.health} HP..."
                            )
                        # Clamp healing at full HP.
                        if hero.health > hero.full_hp:
                            hero.health = hero.full_hp
            for armor_class in game.armors:
                if armor_class(
                ).trigger in subcycle["cycle_hash"] and not hero.in_combat:
                    if not hero.armor:
                        hero.armor = armor_class()
                        hero.defense += hero.armor.defense
                        hero.defense_table[game.current_block] = hero.defense
                        output(f"You obtained {armor_class().name}")
            for weapon_class in game.weapons:
                if weapon_class(
                ).trigger in subcycle["cycle_hash"] and not hero.in_combat:
                    if not hero.weapon:
                        hero.weapon = weapon_class()
                        hero.damage += hero.weapon.damage
                        hero.damage_table[game.current_block] = hero.damage
                        output(f"You obtained {weapon_class().name}")
            for enemy_class in game.enemies:
                if enemy_class().trigger in subcycle[
                        "cycle_hash"] and hero.alive and not hero.in_combat:
                    enemy = enemy_class()
                    output(
                        f"You meet {enemy.name} on transaction {subposition} of block {game.current_block}"
                    )
                    hero.in_combat = True
            for event_key in EVENTS:
                #check what happened
                # `enemy` is only bound once combat has started; the
                # hero.in_combat guard keeps this from hitting an unbound name.
                if hero.in_combat and hero.alive and not game.quit:
                    if event_key in subcycle["cycle_hash"] and enemy.alive:
                        event = EVENTS[event_key]
                        output(f"Event: {event}")
                        if event == "attack":
                            attack()
                        elif event == "attack_critical":
                            attack_critical()
                        elif event == "attacked":
                            attacked()
        game.current_block = game.current_block + 1
    if iterator == 2:
        # db iteration finished, now save the story (player interactions serial, based on db)
        replay_save()
    db_output()
    return game, hero
def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False):
    """
    Checks and merge the tx list in out mempool
    :param data: list of tx lists (or a single tx list), or '*' on lost connection
    :param peer_ip: ip of the sending peer; '127.0.0.1' is exempt from freezing
    :param c: cursor over the main ledger db (read-only use here)
    :param size_bypass: if True, will merge whatever the mempool size is
    :param wait: if True, will wait until the main db_lock is free. if False, will just drop.
    :param revert: if True, we are reverting tx from digest_block, so main lock is on. Don't bother, process without lock.
    :return: a status string for empty input, else a list of status messages
    """
    global REFUSE_OLDER_THAN
    # Easy cases of empty or invalid data
    if not data:
        return "Mempool from {} was empty".format(peer_ip)
    mempool_result = []
    if data == '*':
        raise ValueError("Connection lost")
    # EAFP: unknown peers raise KeyError on peers_sent and are simply allowed.
    # Localhost is never throttled.
    try:
        if self.peers_sent[peer_ip] > time.time() and peer_ip != '127.0.0.1':
            self.app_log.warning(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            mempool_result.append(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            return mempool_result
    except:
        # unknown peer
        pass
    # Malformed payload: freeze the remote peer for 10 minutes and bail.
    if not essentials.is_sequence(data):
        if peer_ip != '127.0.0.1':
            with self.peers_lock:
                self.peers_sent[peer_ip] = time.time() + 10 * 60
            self.app_log.warning(
                "Freezing mempool from {} for 10 min - Bad TX format".format(
                    peer_ip))
        mempool_result.append("Bad TX Format")
        return mempool_result
    if not revert:
        while self.db_lock.locked():
            # prevent transactions which are just being digested from being added to mempool
            if not wait:
                # not reverting, but not waiting, bye
                # By default, we don't wait.
                mempool_result.append("Locked ledger, dropping txs")
                return mempool_result
            self.app_log.warning(
                "Waiting for block digestion to finish before merging mempool"
            )
            time.sleep(1)
    # if reverting, don't bother with main lock, go on.
    # Let's really dig
    mempool_result.append(
        "Mempool merging started from {}".format(peer_ip))
    # Single time reference here for the whole merge.
    time_now = time.time()
    # calculate current mempool size before adding txs
    mempool_size = self.size()
    # TODO: we check main ledger db is not locked before beginning, but we don't lock? ok, see comment in node.py. since it's called from a lock, it would deadlock.
    # merge mempool
    # while self.lock.locked():
    #     time.sleep(1)
    with self.lock:
        try:
            block_list = data
            if not isinstance(
                    block_list[0], list
            ):  # convert to list of lists if only one tx and not handled
                block_list = [block_list]
            for transaction in block_list:
                if size_bypass or self.space_left_for_tx(
                        transaction, mempool_size):
                    # all transactions in the mempool need to be cycled to check for special cases,
                    # therefore no while/break loop here
                    # Normalize/truncate every field to its max length first.
                    mempool_timestamp = '%.2f' % (quantize_two(transaction[0]))
                    mempool_timestamp_float = float(
                        transaction[0])  # limit Decimal where not needed
                    mempool_address = str(transaction[1])[:56]
                    mempool_recipient = str(transaction[2])[:56]
                    mempool_amount = '%.8f' % (quantize_eight(
                        transaction[3]))  # convert scientific notation
                    mempool_amount_float = float(transaction[3])
                    mempool_signature_enc = str(transaction[4])[:684]
                    mempool_public_key_hashed = str(transaction[5])[:1068]
                    # Strip a stray "b'..." bytes-repr prefix some peers send.
                    if "b'" == mempool_public_key_hashed[:2]:
                        mempool_public_key_hashed = transaction[5][2:1070]
                    mempool_operation = str(transaction[6])[:30]
                    mempool_openfield = str(transaction[7])[:100000]
                    # Begin with the easy tests that do not require cpu or disk access
                    if mempool_amount_float < 0:
                        mempool_result.append(
                            "Mempool: Negative balance spend attempt")
                        continue
                    if not essentials.address_validate(mempool_address):
                        mempool_result.append(
                            "Mempool: Invalid address {}".format(
                                mempool_address))
                        continue
                    if not essentials.address_validate(mempool_recipient):
                        mempool_result.append(
                            "Mempool: Invalid recipient {}".format(
                                mempool_recipient))
                        continue
                    if mempool_timestamp_float > time_now:
                        mempool_result.append(
                            "Mempool: Future transaction rejected {}s".format(
                                mempool_timestamp_float - time_now))
                        continue
                    if mempool_timestamp_float < time_now - REFUSE_OLDER_THAN:
                        # don't accept old txs, mempool needs to be harsher than ledger
                        mempool_result.append("Mempool: Too old a transaction")
                        continue
                    # Then more cpu heavy tests
                    # Sender address must be the sha224 of the supplied pubkey.
                    hashed_address = hashlib.sha224(
                        base64.b64decode(mempool_public_key_hashed)).hexdigest()
                    if mempool_address != hashed_address:
                        mempool_result.append(
                            "Mempool: Attempt to spend from a wrong address {} instead of {}"
                            .format(mempool_address, hashed_address))
                        continue
                    # Crypto tests - more cpu hungry
                    # NOTE(review): a pem validation failure is only logged,
                    # not `continue`d — the tx still goes through sig check;
                    # confirm that is intended.
                    try:
                        essentials.validate_pem(mempool_public_key_hashed)
                    except ValueError as e:
                        mempool_result.append(
                            "Mempool: Public key does not validate: {}".format(
                                e))
                    # recheck sig
                    try:
                        mempool_public_key = RSA.importKey(
                            base64.b64decode(mempool_public_key_hashed))
                        mempool_signature_dec = base64.b64decode(
                            mempool_signature_enc)
                        verifier = PKCS1_v1_5.new(mempool_public_key)
                        # Rebuild the exact tuple that was signed.
                        tx_signed = (mempool_timestamp, mempool_address,
                                     mempool_recipient, mempool_amount,
                                     mempool_operation, mempool_openfield)
                        my_hash = SHA.new(str(tx_signed).encode("utf-8"))
                        if not verifier.verify(my_hash, mempool_signature_dec):
                            mempool_result.append(
                                "Mempool: Wrong signature ({}) for data {} in mempool insert attempt"
                                .format(mempool_signature_enc, tx_signed))
                            continue
                    except Exception as e:
                        mempool_result.append(
                            "Mempool: Unexpected error checking sig: {}".format(
                                e))
                        continue
                    # Only now, process the tests requiring db access
                    mempool_in = self.sig_check(mempool_signature_enc)
                    # Temp: get last block for HF reason
                    essentials.execute_param_c(
                        c,
                        "SELECT block_height FROM transactions WHERE 1 ORDER by block_height DESC limit ?",
                        (1, ), self.app_log)
                    last_block = c.fetchone()[0]
                    # reject transactions which are already in the ledger
                    # TODO: not clean, will need to have ledger as a module too.
                    essentials.execute_param_c(
                        c,
                        "SELECT timestamp FROM transactions WHERE signature = ?",
                        (mempool_signature_enc, ), self.app_log)
                    ledger_in = bool(c.fetchone())
                    # remove from mempool if it's in both ledger and mempool already
                    if mempool_in and ledger_in:
                        try:
                            # Do not lock, we already have the lock for the whole merge.
                            self.execute(SQL_DELETE_TX,
                                         (mempool_signature_enc, ))
                            self.commit()
                            mempool_result.append(
                                "Mempool: Transaction deleted from our mempool")
                        except:  # experimental try and except
                            mempool_result.append(
                                "Mempool: Transaction was not present in the pool anymore"
                            )
                        continue
                    if ledger_in:
                        mempool_result.append(
                            "That transaction is already in our ledger")
                        # Can be a syncing node. Do not request mempool from this peer until 10 min
                        if peer_ip != '127.0.0.1':
                            with self.peers_lock:
                                self.peers_sent[peer_ip] = time.time() + 10 * 60
                            self.app_log.warning(
                                "Freezing mempool from {} for 10 min.".format(
                                    peer_ip))
                        # Here, we point blank stop processing the batch from this host since it's outdated.
                        # Update: Do not, since it blocks further valid tx - case has been found in real use.
                        # return mempool_result
                        continue
                    # Already there, just ignore then
                    if mempool_in:
                        mempool_result.append(
                            "That transaction is already in our mempool")
                        continue
                    # Here we covered the basics, the current tx is conform and signed. Now let's check balance.
                    # verify balance
                    mempool_result.append(
                        "Mempool: Received address: {}".format(mempool_address))
                    # include mempool fees
                    result = self.fetchall(
                        "SELECT amount, openfield, operation FROM transactions WHERE address = ?",
                        (mempool_address, ))
                    debit_mempool = 0
                    if result:
                        for x in result:
                            debit_tx = quantize_eight(x[0])
                            fee = quantize_eight(
                                essentials.fee_calculate(x[1], x[2],
                                                         last_block))
                            debit_mempool = quantize_eight(debit_mempool +
                                                           debit_tx + fee)
                    # Ledger-side aggregates for this address.
                    credit = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT amount FROM transactions WHERE recipient = ?",
                            (mempool_address, ), self.app_log):
                        credit = quantize_eight(credit) + quantize_eight(
                            entry[0])
                    debit_ledger = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT amount FROM transactions WHERE address = ?",
                            (mempool_address, ), self.app_log):
                        debit_ledger = quantize_eight(
                            debit_ledger) + quantize_eight(entry[0])
                    debit = debit_ledger + debit_mempool
                    fees = 0
                    for entry in essentials.execute_param_c(
                            c, "SELECT fee FROM transactions WHERE address = ?",
                            (mempool_address, ), self.app_log):
                        fees = quantize_eight(fees) + quantize_eight(entry[0])
                    rewards = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT sum(reward) FROM transactions WHERE recipient = ?",
                            (mempool_address, ), self.app_log):
                        rewards = quantize_eight(rewards) + quantize_eight(
                            entry[0])
                    # balance includes pending mempool debits; balance_pre is ledger-only.
                    balance = quantize_eight(credit - debit - fees + rewards -
                                             quantize_eight(mempool_amount))
                    balance_pre = quantize_eight(credit - debit_ledger - fees +
                                                 rewards)
                    fee = essentials.fee_calculate(mempool_openfield,
                                                   mempool_operation,
                                                   last_block)
                    if quantize_eight(mempool_amount) > quantize_eight(
                            balance_pre):
                        mempool_result.append("Mempool: Sending more than owned")
                        continue
                    if quantize_eight(balance) - quantize_eight(fee) < 0:
                        mempool_result.append(
                            "Mempool: Cannot afford to pay fees")
                        continue
                    # Pfew! we can finally insert into mempool - all is str, type converted and enforced above
                    self.execute(
                        "INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?)",
                        (mempool_timestamp, mempool_address, mempool_recipient,
                         mempool_amount, mempool_signature_enc,
                         mempool_public_key_hashed, mempool_operation,
                         mempool_openfield))
                    mempool_result.append(
                        "Mempool updated with a received transaction from {}"
                        .format(peer_ip))
                    mempool_result.append("Success")
                    self.commit()  # Save (commit) the changes to mempool db
                    # Track growth so space_left_for_tx stays accurate.
                    mempool_size += sys.getsizeof(str(transaction)) / 1000000.0
                else:
                    mempool_result.append(
                        "Local mempool is already full for this tx type, skipping merging"
                    )
                    # self.app_log.warning("Local mempool is already full for this tx type, skipping merging")
                    # TEMP
                    # print("Mempool insert", mempool_result)
                    return mempool_result
            # TODO: Here maybe commit() on c to release the write lock?
        except Exception as e:
            self.app_log.warning("Mempool: Error processing: {} {}".format(
                data, e))
            if self.config.debug_conf == 1:
                raise
        return mempool_result
def merge(self,
          data: list,
          peer_ip: str,
          c,
          size_bypass: bool = False,
          wait: bool = False,
          revert: bool = False) -> list:
    """
    Checks and merge the tx list in out mempool

    :param data: list of transactions (or single transaction as a flat list)
    :param peer_ip: ip of the sending peer, '127.0.0.1' for local txs
    :param c: ledger db cursor (read access)
    :param size_bypass: if True, will merge whatever the mempool size is
    :param wait: if True, will wait until the main db_lock is free. if False, will just drop.
    :param revert: if True, we are reverting tx from digest_block, so main lock is on. Don't bother, process without lock.
    :return: list of status messages, one or more per processed tx. Ends with "Success" per inserted tx.
    """
    global REFUSE_OLDER_THAN
    # Easy cases of empty or invalid data
    if not data:
        return ["Mempool from {} was empty".format(peer_ip)]
    mempool_result = []
    if data == '*':
        raise ValueError("Connection lost")
    try:
        # A frozen peer sent us bad/outdated data recently: ignore it until the freeze expires.
        if self.peers_sent[peer_ip] > time.time() and peer_ip != '127.0.0.1':
            self.app_log.warning(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            mempool_result.append(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            return mempool_result
    except KeyError:
        # unknown peer, never frozen: proceed.
        pass
    if not essentials.is_sequence(data):
        if peer_ip != '127.0.0.1':
            with self.peers_lock:
                self.peers_sent[peer_ip] = time.time() + 10 * 60
            self.app_log.warning(
                "Freezing mempool from {} for 10 min - Bad TX format".format(
                    peer_ip))
        mempool_result.append("Bad TX Format")
        return mempool_result
    if not revert:
        while self.db_lock.locked():
            # prevent transactions which are just being digested from being added to mempool
            if not wait:
                # not reverting, but not waiting, bye
                # By default, we don't wait.
                mempool_result.append("Locked ledger, dropping txs")
                return mempool_result
            self.app_log.warning(
                "Waiting for block digestion to finish before merging mempool")
            time.sleep(1)
    # if reverting, don't bother with main lock, go on.
    # Let's really dig
    mempool_result.append("Mempool merging started from {}".format(peer_ip))
    # Single time reference here for the whole merge.
    time_now = time.time()
    # calculate current mempool size before adding txs
    mempool_size = self.size()
    # TODO: we check main ledger db is not locked before beginning, but we don't lock? ok, see comment in node.py.
    # since it's called from a lock, it would deadlock.
    with self.lock:
        try:
            block_list = data
            if not isinstance(block_list[0], list):
                # convert to list of lists if only one tx and not handled
                block_list = [block_list]
            for transaction in block_list:
                if size_bypass or self.space_left_for_tx(
                        transaction, mempool_size):
                    # all transactions in the mempool need to be cycled to check for special cases,
                    # therefore no while/break loop here
                    try:
                        mempool_timestamp = '%.2f' % (quantize_two(
                            transaction[0]))
                        mempool_timestamp_float = float(
                            transaction[0])  # limit Decimal where not needed
                    except Exception:
                        mempool_result.append(
                            "Mempool: Invalid timestamp {}".format(
                                transaction[0]))
                        # FIX: was missing - without it, an invalid timestamp fell through
                        # and later code used an undefined/stale mempool_timestamp,
                        # raising NameError into the outer handler and aborting the batch.
                        continue
                    if not essentials.address_validate(transaction[1]):
                        mempool_result.append(
                            "Mempool: Invalid address {}".format(
                                transaction[1]))
                        continue
                    # We could now ignore the truncates here, I left them for explicit
                    # reminder of the various fields max lengths.
                    mempool_address = str(transaction[1])[:56]
                    if not essentials.address_validate(transaction[2]):
                        mempool_result.append(
                            "Mempool: Invalid recipient {}".format(
                                transaction[2]))
                        continue
                    mempool_recipient = str(transaction[2])[:56]
                    try:
                        mempool_amount = '%.8f' % (quantize_eight(
                            transaction[3]))  # convert scientific notation
                        mempool_amount_float = float(transaction[3])
                    except Exception:
                        mempool_result.append(
                            "Mempool: Invalid amount {}".format(
                                transaction[3]))
                        continue
                    if len(transaction[4]) > 684:
                        mempool_result.append(
                            "Mempool: Invalid signature len{}".format(
                                len(transaction[4])))
                        continue
                    mempool_signature_enc = str(transaction[4])[:684]
                    if len(transaction[5]) > 1068:
                        mempool_result.append(
                            "Mempool: Invalid pubkey len{}".format(
                                len(transaction[5])))
                        continue
                    mempool_public_key_b64encoded = str(transaction[5])[:1068]
                    if "b'" == mempool_public_key_b64encoded[:2]:
                        # Binary content instead of str - leftover from legacy code?
                        mempool_public_key_b64encoded = transaction[5][2:1070]
                    if len(transaction[6]) > 30:
                        mempool_result.append(
                            "Mempool: Invalid operation len{}".format(
                                len(transaction[6])))
                        continue
                    mempool_operation = str(transaction[6])[:30]
                    if len(transaction[7]) > 100000:
                        mempool_result.append(
                            "Mempool: Invalid openfield len{}".format(
                                len(transaction[7])))
                        continue
                    mempool_openfield = str(transaction[7])[:100000]
                    if len(mempool_openfield) <= 4:
                        # no or short message for a mandatory message
                        if mempool_recipient in self.config.mandatory_message.keys():
                            mempool_result.append(
                                "Mempool: Missing message - {}".format(
                                    self.config.
                                    mandatory_message[mempool_recipient]))
                            continue
                    # Begin with the easy tests that do not require cpu or disk access
                    if mempool_amount_float < 0:
                        mempool_result.append(
                            "Mempool: Negative balance spend attempt")
                        continue
                    if mempool_timestamp_float > time_now:
                        mempool_result.append(
                            "Mempool: Future transaction rejected {}s".format(
                                mempool_timestamp_float - time_now))
                        continue
                    if mempool_timestamp_float < time_now - REFUSE_OLDER_THAN:
                        # don't accept old txs, mempool needs to be harsher than ledger
                        mempool_result.append("Mempool: Too old a transaction")
                        continue
                    # Then more cpu heavy tests
                    buffer = str(
                        (mempool_timestamp, mempool_address, mempool_recipient,
                         mempool_amount, mempool_operation,
                         mempool_openfield)).encode("utf-8")
                    # Will raise if error
                    try:
                        SignerFactory.verify_bis_signature(
                            mempool_signature_enc,
                            mempool_public_key_b64encoded, buffer,
                            mempool_address)
                    except Exception as e:
                        mempool_result.append(
                            f"Mempool: Signature did not match for address ({e})"
                        )
                        continue
                    # Only now, process the tests requiring db access
                    mempool_in = self.sig_check(mempool_signature_enc)
                    # Temp: get last block for HF reason
                    essentials.execute_param_c(
                        c,
                        "SELECT block_height FROM transactions WHERE 1 ORDER by block_height DESC limit ?",
                        (1, ), self.app_log)
                    last_block = c.fetchone()[0]
                    # reject transactions which are already in the ledger
                    # TODO: not clean, will need to have ledger as a module too.
                    # TODO: need better txid index, this is very sloooooooow
                    if self.config.old_sqlite:
                        essentials.execute_param_c(
                            c,
                            "SELECT timestamp FROM transactions WHERE signature = ?1",
                            (mempool_signature_enc, ), self.app_log)
                    else:
                        essentials.execute_param_c(
                            c,
                            "SELECT timestamp FROM transactions WHERE substr(signature,1,4) = substr(?1,1,4) AND signature = ?1",
                            (mempool_signature_enc, ), self.app_log)
                    ledger_in = bool(c.fetchone())
                    # remove from mempool if it's in both ledger and mempool already
                    if mempool_in and ledger_in:
                        try:
                            # Do not lock, we already have the lock for the whole merge.
                            if self.config.old_sqlite:
                                self.execute(SQL_DELETE_TX_OLD,
                                             (mempool_signature_enc, ))
                            else:
                                self.execute(SQL_DELETE_TX,
                                             (mempool_signature_enc, ))
                            self.commit()
                            mempool_result.append(
                                "Mempool: Transaction deleted from our mempool"
                            )
                        except Exception:
                            # experimental try and except
                            mempool_result.append(
                                "Mempool: Transaction was not present in the pool anymore"
                            )
                        continue
                    if ledger_in:
                        mempool_result.append(
                            "That transaction is already in our ledger")
                        # Can be a syncing node. Do not request mempool from this peer until FREEZE_MIN min
                        # ledger_in is the ts of the tx in ledger. if it's recent, maybe the peer is just one block late.
                        # give him 15 minute margin.
                        # NOTE(review): ledger_in is actually a bool here (see assignment above),
                        # so this comparison is always true - confirm whether the raw timestamp
                        # was intended before changing behavior.
                        if (peer_ip != '127.0.0.1') and (
                                ledger_in < time_now - 60 * 15):
                            with self.peers_lock:
                                self.peers_sent[peer_ip] = time.time(
                                ) + FREEZE_MIN * 60
                            self.app_log.warning(
                                "Freezing mempool from {} for {} min.".format(
                                    peer_ip, FREEZE_MIN))
                        # Here, we point blank stop processing the batch from this host since it's outdated.
                        # Update: Do not, since it blocks further valid tx - case has been found in real use.
                        # return mempool_result
                        continue
                    # Already there, just ignore then
                    if mempool_in:
                        mempool_result.append(
                            "That transaction is already in our mempool")
                        continue
                    # Here we covered the basics, the current tx is conform and signed.
                    # Now let's check balance.
                    # verify balance
                    mempool_result.append(
                        "Mempool: Received address: {}".format(
                            mempool_address))
                    # include mempool fees
                    result = self.fetchall(
                        "SELECT amount, openfield, operation FROM transactions WHERE address = ?",
                        (mempool_address, ))
                    debit_mempool = 0
                    if result:
                        for x in result:
                            debit_tx = quantize_eight(x[0])
                            fee = quantize_eight(
                                essentials.fee_calculate(
                                    x[1], x[2], last_block))
                            debit_mempool = quantize_eight(debit_mempool +
                                                           debit_tx + fee)
                    credit = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT amount FROM transactions WHERE recipient = ?",
                            (mempool_address, ), self.app_log):
                        credit = quantize_eight(credit) + quantize_eight(
                            entry[0])
                    debit_ledger = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT amount FROM transactions WHERE address = ?",
                            (mempool_address, ), self.app_log):
                        debit_ledger = quantize_eight(
                            debit_ledger) + quantize_eight(entry[0])
                    debit = debit_ledger + debit_mempool
                    fees = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT fee FROM transactions WHERE address = ?",
                            (mempool_address, ), self.app_log):
                        fees = quantize_eight(fees) + quantize_eight(entry[0])
                    rewards = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT sum(reward) FROM transactions WHERE recipient = ?",
                            (mempool_address, ), self.app_log):
                        rewards = quantize_eight(rewards) + quantize_eight(
                            entry[0])
                    # error conversion from NoneType to Decimal is not supported
                    balance = quantize_eight(credit - debit - fees + rewards -
                                             quantize_eight(mempool_amount))
                    balance_pre = quantize_eight(credit - debit_ledger - fees +
                                                 rewards)
                    fee = essentials.fee_calculate(mempool_openfield,
                                                   mempool_operation,
                                                   last_block)
                    # print("Balance", balance, fee)
                    if quantize_eight(mempool_amount) > quantize_eight(
                            balance_pre):
                        # mp amount is already included in "balance" var!
                        # also, that tx might already be in the mempool
                        mempool_result.append(
                            "Mempool: Sending more than owned")
                        continue
                    if quantize_eight(balance) - quantize_eight(fee) < 0:
                        mempool_result.append(
                            "Mempool: Cannot afford to pay fees")
                        continue
                    # Pfew! we can finally insert into mempool - all is str,
                    # type converted and enforced above
                    self.execute(
                        "INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?)",
                        (mempool_timestamp, mempool_address, mempool_recipient,
                         mempool_amount, mempool_signature_enc,
                         mempool_public_key_b64encoded, mempool_operation,
                         mempool_openfield, int(time_now)))
                    mempool_result.append(
                        "Mempool updated with a received transaction from {}"
                        .format(peer_ip))
                    mempool_result.append(
                        "Success"
                    )  # WARNING: Do not change string or case ever!
                    self.commit()  # Save (commit) the changes to mempool db
                    mempool_size += sys.getsizeof(
                        str(transaction)) / 1000000.0
                else:
                    mempool_result.append(
                        "Local mempool is already full for this tx type, skipping merging"
                    )
                    # self.app_log.warning("Local mempool is already full for this tx type, skipping merging")
            # TEMP
            # print("Mempool insert", mempool_result)
            return mempool_result
            # TODO: Here maybe commit() on c to release the write lock?
        except Exception as e:
            self.app_log.warning("Mempool: Error processing: {} {}".format(
                data, e))
            if self.config.debug:
                exc_type, exc_obj, exc_tb = sys.exc_info()
                fname = os.path.split(
                    exc_tb.tb_frame.f_code.co_filename)[1]
                self.app_log.warning("{} {} {}".format(
                    exc_type, fname, exc_tb.tb_lineno))
                mempool_result.append("Exception: {}".format(str(e)))
                # if left there, means debug can *not* be used in production,
                # or exception is not sent back to the client.
                raise
    return mempool_result
#rewards = stats_account[4] print("Transaction address: %s" % address) print("Transaction address balance: %s" % balance) try: amount_input = sys.argv[1] except IndexError: amount_input = input("Amount: ") try: recipient_input = sys.argv[2] except IndexError: recipient_input = input("Recipient: ") if not address_validate(recipient_input): print("Wrong recipient address format") exit(1) try: operation_input = sys.argv[3] except IndexError: operation_input = 0 try: openfield_input = sys.argv[4] except IndexError: openfield_input = input("Enter openfield data (message): ") # hardfork fee display fee = fee_calculate(openfield_input)