Code example #1
    def hn_reward(self, node, block_array, miner_tx, mirror_hash):
        fork = Fork()

        if node.is_testnet and node.last_block >= fork.POW_FORK_TESTNET:
            self.reward_sum = 24 - 10 * (node.last_block + 5 -
                                         fork.POW_FORK_TESTNET) / 3000000

        elif node.is_mainnet and node.last_block >= fork.POW_FORK:
            self.reward_sum = 24 - 10 * (node.last_block + 5 -
                                         fork.POW_FORK) / 3000000
        else:
            self.reward_sum = 24

        if self.reward_sum < 0.5:
            self.reward_sum = 0.5

        self.reward_sum = '{:.8f}'.format(self.reward_sum)

        self.execute_param(
            self.c, self.SQL_TO_TRANSACTIONS,
            (-block_array.block_height_new, str(
                miner_tx.q_block_timestamp), "Hypernode Payouts",
             "3e08b5538a4509d9daa99e01ca5912cda3e98a7f79ca01248c2bde16",
             self.reward_sum, "0", "0", mirror_hash, "0", "0", "0", "0"))
        self.commit(self.conn)
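The decaying payout above can be read as a small standalone schedule. A minimal sketch, assuming only the constants visible in this method (the function name and parameters are illustrative, not part of Bismuth):

# Illustrative sketch of the payout schedule in hn_reward above: 24 at the fork
# height, decaying by 10 per 3,000,000 blocks, floored at 0.5, formatted to
# 8 decimals. Names are hypothetical.
def hypernode_reward(block_height: int, pow_fork_height: int) -> str:
    if block_height >= pow_fork_height:
        reward = 24 - 10 * (block_height + 5 - pow_fork_height) / 3000000
    else:
        reward = 24
    return '{:.8f}'.format(max(reward, 0.5))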
Code example #2
File: dbhandler.py Project: iyomisc/Bismuth
    def hn_reward(self, node, block_array, miner_tx, mirror_hash):
        fork = Fork()

        if node.last_block >= fork.POW_FORK or (
                node.is_testnet and node.last_block >= fork.POW_FORK_TESTNET):
            self.reward_sum = "24"
        else:
            self.reward_sum = "8"

        self.execute_param(
            self.c, self.SQL_TO_TRANSACTIONS,
            (-block_array.block_height_new, str(
                miner_tx.q_block_timestamp), "Hypernode Payouts",
             "3e08b5538a4509d9daa99e01ca5912cda3e98a7f79ca01248c2bde16",
             self.reward_sum, "0", "0", mirror_hash, "0", "0", "0", "0"))
        self.commit(self.conn)
Code example #3
    def __init__(self, no_philosophers):
        self.no_philosophers = no_philosophers
        self.no_forks = no_philosophers

        self.philosophers = []
        self.forks = []

        for index in range(self.no_forks):
            self.forks.append(Fork(index))

        for index in range(self.no_philosophers):
            ph = Philosopher(f'PH_{index}')
            left_fork = self.forks[(index + 1) % self.no_forks]
            right_fork = self.forks[index]
            ph.set_left_fork(left_fork)
            ph.set_right_fork(right_fork)
            self.philosophers.append(ph)
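The Fork and Philosopher classes are not part of this snippet. A minimal sketch of what the constructor above appears to assume, treating a fork as a lock identified by its seat index (purely an assumption about this project):

import threading

class Fork:
    # Hypothetical fork matching Fork(index) above: one lock per seat at the table.
    def __init__(self, index):
        self.index = index
        self.lock = threading.Lock()

    def pick_up(self):
        self.lock.acquire()

    def put_down(self):
        self.lock.release()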
Code example #4
    def start(self):
        # create the forks
        self.forks = [Fork() for _ in range(self.count)]

        # create the philosophers and pass them their index and
        # their left- and righthand forks
        self.philosophers = []
        for i in range(0, self.count):
            self.philosophers.append(
                self.philosopherClass(i, self.__leftFork(i),
                                      self.__rightFork(i)))

        # create a thread per philosopher
        self.threads = [
            Thread(target=self.__philosopherMain, args=[p])
            for p in self.philosophers
        ]

        # start the actual simulation
        totalRuntime = self.__run()

        #display results
        self.__displayRuntimes(totalRuntime)
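The private __leftFork(i) and __rightFork(i) helpers referenced above are not shown. A plausible sketch, assuming the same circular fork layout as in example #3 (the exact mapping in the original project may differ):

    def __leftFork(self, i):
        # Hypothetical: philosopher i's left fork is the one at their own seat.
        return self.forks[i]

    def __rightFork(self, i):
        # Hypothetical: the right fork is shared with the next seat around the table.
        return self.forks[(i + 1) % self.count]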
Code example #5
def difficulty(node, db_handler):
    try:
        fork = Fork()

        db_handler.execute(db_handler.c, "SELECT * FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 2")
        result = db_handler.c.fetchone()

        timestamp_last = Decimal(result[1])
        block_height = int(result[0])

        node.last_block_timestamp = timestamp_last
        #node.last_block = block_height do not fetch this here, could interfere with block saving

        previous = db_handler.c.fetchone()

        node.last_block_ago = int(time.time() - int(timestamp_last))

        # Failsafe for regtest starting at block 1
        timestamp_before_last = timestamp_last if previous is None else Decimal(previous[1])

        db_handler.execute_param(db_handler.c, (
            "SELECT timestamp FROM transactions WHERE block_height > ? AND reward != 0 ORDER BY block_height ASC LIMIT 2"),
                                 (block_height - 1441,))
        timestamp_1441 = Decimal(db_handler.c.fetchone()[0])
        block_time_prev = (timestamp_before_last - timestamp_1441) / 1440
        temp = db_handler.c.fetchone()
        timestamp_1440 = timestamp_1441 if temp is None else Decimal(temp[0])
        block_time = Decimal(timestamp_last - timestamp_1440) / 1440

        db_handler.execute(db_handler.c, "SELECT difficulty FROM misc ORDER BY block_height DESC LIMIT 1")
        diff_block_previous = Decimal(db_handler.c.fetchone()[0])

        time_to_generate = timestamp_last - timestamp_before_last

        if node.is_regnet:
            return (float('%.10f' % regnet.REGNET_DIFF), float('%.10f' % (regnet.REGNET_DIFF - 8)), float(time_to_generate),
                    float(regnet.REGNET_DIFF), float(block_time), float(0), float(0), block_height)

        hashrate = pow(2, diff_block_previous / Decimal(2.0)) / (
                block_time * math.ceil(28 - diff_block_previous / Decimal(16.0)))
        # Calculate new difficulty for desired blocktime of 60 seconds
        target = Decimal(60.00)
        ##D0 = diff_block_previous
        difficulty_new = Decimal(
            (2 / math.log(2)) * math.log(hashrate * target * math.ceil(28 - diff_block_previous / Decimal(16.0))))
        # Feedback controller
        Kd = 10
        difficulty_new = difficulty_new - Kd * (block_time - block_time_prev)
        diff_adjustment = (difficulty_new - diff_block_previous) / 720  # reduce by factor of 720

        if diff_adjustment > Decimal(1.0):
            diff_adjustment = Decimal(1.0)

        difficulty_new_adjusted = quantize_ten(diff_block_previous + diff_adjustment)
        difficulty = difficulty_new_adjusted

        #fork handling
        if node.is_mainnet:
            if block_height == fork.POW_FORK - fork.FORK_AHEAD:
                fork.limit_version(node)
        #fork handling

        diff_drop_time = Decimal(180)

        if Decimal(time.time()) > Decimal(timestamp_last) + Decimal(2 * diff_drop_time):
            # Emergency diff drop
            time_difference = quantize_two(time.time()) - quantize_two(timestamp_last)
            diff_dropped = quantize_ten(difficulty) - quantize_ten(1) \
                           - quantize_ten(10 * (time_difference - 2 * diff_drop_time) / diff_drop_time)
        elif Decimal(time.time()) > Decimal(timestamp_last) + Decimal(diff_drop_time):
            time_difference = quantize_two(time.time()) - quantize_two(timestamp_last)
            diff_dropped = quantize_ten(difficulty) + quantize_ten(1) - quantize_ten(time_difference / diff_drop_time)
        else:
            diff_dropped = difficulty

        if difficulty < 50:
            difficulty = 50
        if diff_dropped < 50:
            diff_dropped = 50

        return (
            float('%.10f' % difficulty), float('%.10f' % diff_dropped), float(time_to_generate), float(diff_block_previous),
            float(block_time), float(hashrate), float(diff_adjustment),
            block_height)  # need to keep float here for database inserts support
    except:  # new chain or regnet
        difficulty = [24, 24, 0, 0, 0, 0, 0, 0]
        return difficulty
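Stripped of the database access, fork handling, emergency drop and the floor at 50, the retarget math above reduces to a few lines. A simplified standalone sketch using the same constants as the code (the function name and parameters are illustrative):

import math
from decimal import Decimal

def retarget(diff_prev: Decimal, block_time: Decimal, block_time_prev: Decimal) -> Decimal:
    # Estimate the hashrate implied by the previous difficulty and observed block
    # time, solve for the difficulty that would give a 60 s target, apply the
    # Kd = 10 feedback term, and move only 1/720 of the way there (capped at +1),
    # as in the code above.
    scale = math.ceil(28 - diff_prev / Decimal(16))
    hashrate = pow(2, diff_prev / Decimal(2)) / (block_time * scale)
    difficulty_new = Decimal((2 / math.log(2)) * math.log(hashrate * Decimal(60) * scale))
    difficulty_new -= 10 * (block_time - block_time_prev)
    adjustment = min((difficulty_new - diff_prev) / 720, Decimal(1))
    return diff_prev + adjustment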
Code example #6
File: main.py Project: hammii-hamster/flappy-birb
def main():
    global Ticks, loopCount, score
    num = 70
    while True:
        clock.tick(60)
        if loopCount % 70 == 35:
            coins.add(
                Coin((int(screen_info.current_w),
                      random.randint(0, int(screen_info.current_h)))))
        if loopCount % 70 == 0:
            toppos = random.randint(0, height // 2) - 400
            forks.add(Fork((width - 100, toppos + gapsize + 650)))
            forks.add(Fork((width - 100, toppos), True))
            scorebars.add(Scorebar((width - 100, 0)))

        for event in pygame.event.get():
            if event.type == QUIT:
                sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_SPACE:
                    player.speed[1] = -10

        screen.fill(color)
        player.update()
        forks.update()
        scorebars.update()
        coins.update()
        gets_coins = pygame.sprite.spritecollide(player, coins, False)
        gets_score = pygame.sprite.spritecollide(player, scorebars, False)
        gets_hit = pygame.sprite.spritecollide(player, forks, False)
        gets_hit2 = player.rect.center[1] > height
        screen.blit(background, [0, 0])
        forks.draw(screen)
        scorebars.draw(screen)
        coins.draw(screen)
        screen.blit(player.image, player.rect)

        font = pygame.font.SysFont(None, 70)
        text = font.render("Score: " + str(score), True, (
            0,
            0,
            0,
        ))
        text_rect = text.get_rect()
        text_rect.center = width / 2, height / 2
        screen.blit(text, text_rect)

        pygame.display.flip()
        if num > 1:
            num -= 0.00001
        loopCount += 1

        if gets_coins:
            score += 10
            coins.remove(gets_coins)

        if gets_score:
            score += 1
            scorebars.remove(gets_score)

        if gets_hit:
            lose()
            break

        if gets_hit2:
            losefall()
            break
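The Fork sprite constructed above with a position tuple and an optional flip flag is not included in this snippet. A hedged sketch of what those calls suggest, with an assumed image path and scroll speed:

import pygame

class Fork(pygame.sprite.Sprite):
    # Hypothetical obstacle sprite: spawns at pos, is flipped vertically for the
    # top of the gap, and scrolls left until it leaves the screen. The asset name
    # "fork.png" and the speed are assumptions, not taken from the project.
    def __init__(self, pos, flipped=False):
        super().__init__()
        self.image = pygame.image.load("fork.png").convert_alpha()
        if flipped:
            self.image = pygame.transform.flip(self.image, False, True)
        self.rect = self.image.get_rect(topleft=pos)

    def update(self):
        self.rect.move_ip(-5, 0)
        if self.rect.right < 0:
            self.kill()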
Code example #7
import hashlib
import os
import sys

import essentials
import mempool as mp
import mining_heavy3
from difficulty import *
from essentials import address_is_rsa, checkpoint_set, ledger_balance3
from polysign.signerfactory import SignerFactory
from fork import Fork
import tokensv2 as tokens
from decimal import Decimal

fork = Fork()


def digest_block(node, data, sdef, peer_ip, db_handler):
    """node param for imports"""
    class Transaction:
        def __init__(self):
            self.start_time_tx = 0
            self.q_received_timestamp = 0
            self.received_timestamp = "0.00"
            self.received_address = None
            self.received_recipient = None
            self.received_amount = 0
            self.received_signature_enc = None
            self.received_public_key_b64encoded = None
            self.received_operation = None
            self.received_openfield = None
Code example #8
"""
from docopt import docopt
import sys

from processor import Processor
from fork import Fork
from fork import ForkConfig
from unit_e_substituter import UnitESubstituter

if __name__ == "__main__":
    arguments = docopt(__doc__)
    processor = Processor(ForkConfig())
    unit_e_branch = arguments["--unit-e-branch"]
    bitcoin_branch = arguments["--bitcoin-branch"]
    if arguments["fork"]:
        Fork(unit_e_branch, bitcoin_branch).run()
    elif arguments["file"]:
        filename = arguments["<filename>"]
        print(f"Substituting strings in file {filename}")
        processor.substitute_bitcoin_core_identifier_in_file(filename)
        processor.substitute_bitcoin_identifier_in_file(filename)
        processor.replace_in_file(filename,
                                  "BTC",
                                  "UTE",
                                  match_before="$|[^a-bd-ln-tv-zA-Z]")
    elif arguments["substitute-unit-e-naming"]:
        UnitESubstituter().substitute_naming(processor)
    elif arguments["substitute-unit-e-urls"]:
        UnitESubstituter().substitute_urls(processor)
    elif arguments["substitute-unit-e-executables"]:
        UnitESubstituter().substitute_executables(processor)
Code example #9
def main():
    fork = Fork()  # graph object instantiated
    #fork.degree_vertices()
    #fork.show_neighbors()
    deepSearch = DeepSearch(fork)  # depth-first search receives a graph to traverse
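Neither Fork (the graph) nor DeepSearch is shown here. As a hedged illustration of the traversal such a class typically performs, a generic depth-first search over an adjacency-list graph (names and structure are not from this project):

def depth_first(graph, start, visited=None):
    # Generic DFS over a dict mapping each vertex to its neighbours.
    if visited is None:
        visited = set()
    visited.add(start)
    for neighbour in graph.get(start, []):
        if neighbour not in visited:
            depth_first(graph, neighbour, visited)
    return visited

# depth_first({"a": ["b", "c"], "b": ["c"], "c": []}, "a") -> {"a", "b", "c"}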
Code example #10
    def start_solving(self):
        self._init_matrices()

        # Adopt Action Definition
        self.probability[Action.Adopt][:, self._convert_state_to_number(1, 0, Fork.Irrelevant)] = self.alpha
        self.probability[Action.Adopt][:, self._convert_state_to_number(0, 1, Fork.Irrelevant)] = 1 - self.alpha

        for i in range(self.state_number):
            state = self._convert_number_to_state(i)
            attacker_length = state[0]
            honest_length = state[1]
            fork = Fork(state[2])

            # Adopt Action Definition
            self.honest_reward[Action.Adopt][i, self._convert_state_to_number(1, 0, Fork.Irrelevant)] = honest_length
            self.honest_reward[Action.Adopt][i, self._convert_state_to_number(0, 1, Fork.Irrelevant)] = honest_length

            # Override Action Definition
            if attacker_length > honest_length:
                self.probability[Action.Override][
                    i, self._convert_state_to_number(attacker_length - honest_length, 0, Fork.Irrelevant)] = self.alpha
                self.attacker_reward[Action.Override][
                    i, self._convert_state_to_number(attacker_length - honest_length, 0, Fork.Irrelevant)] = honest_length + 1
                self.probability[Action.Override][
                    i, self._convert_state_to_number(attacker_length - honest_length - 1, 1, Fork.Relevant)] = 1 - self.alpha
                self.attacker_reward[Action.Override][
                    i, self._convert_state_to_number(attacker_length - honest_length - 1, 1, Fork.Relevant)] = honest_length + 1
            else:
                self.probability[Action.Override][i, 1] = 1
                self.honest_reward[Action.Override][i, 1] = 10000

            # Wait Action Definition
            if fork != Fork.Active and attacker_length + 1 <= self.max_fork_len and honest_length + 1 <= self.max_fork_len:
                self.probability[Action.Wait][
                    i, self._convert_state_to_number(attacker_length + 1, honest_length, Fork.Irrelevant)] = self.alpha
                self.probability[Action.Wait][
                    i, self._convert_state_to_number(attacker_length, honest_length + 1, Fork.Relevant)] = 1 - self.alpha
            elif fork == Fork.Active and attacker_length > honest_length and honest_length > 0 and attacker_length + 1 <= self.max_fork_len and honest_length + 1 <= self.max_fork_len:
                self.probability[Action.Wait][
                    i, self._convert_state_to_number(attacker_length + 1, honest_length, Fork.Active)] = self.alpha
                self.probability[Action.Wait][
                    i, self._convert_state_to_number(attacker_length - honest_length, 1, Fork.Relevant)] = self.gamma * (1 - self.alpha)
                self.attacker_reward[Action.Wait][
                    i, self._convert_state_to_number(attacker_length - honest_length, 1, Fork.Relevant)] = honest_length
                self.probability[Action.Wait][
                    i, self._convert_state_to_number(attacker_length, honest_length + 1, Fork.Relevant)] = (1 - self.gamma) * (1 - self.alpha)
            else:
                self.probability[Action.Wait][i, 1] = 1
                self.honest_reward[Action.Wait][i, 1] = 10000

            # Match Action Definition
            if fork == Fork.Relevant and attacker_length >= honest_length and honest_length > 0 and attacker_length + 1 <= self.max_fork_len and honest_length + 1 <= self.max_fork_len:
                self.probability[Action.Match][
                    i, self._convert_state_to_number(attacker_length + 1, honest_length, Fork.Active)] = self.alpha
                self.probability[Action.Match][
                    i, self._convert_state_to_number(attacker_length - honest_length, 1, Fork.Relevant)] = self.gamma * (1 - self.alpha)
                self.attacker_reward[Action.Match][
                    i, self._convert_state_to_number(attacker_length - honest_length, 1, Fork.Relevant)] = honest_length
                self.probability[Action.Match][
                    i, self._convert_state_to_number(attacker_length, honest_length + 1, Fork.Relevant)] = (1 - self.gamma) * (1 - self.alpha)
            else:
                self.probability[Action.Match][i, 1] = 1
                self.honest_reward[Action.Match][i, 1] = 10000

        # self._show_log('********************************************')
        # self._show_log(self.attacker_reward[Action.Match])
        # self._show_log('********************************************')
        self._calculate_lower_bound()
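The _convert_state_to_number / _convert_number_to_state helpers and the Fork enum are not part of this snippet. One plausible encoding that is consistent with how they are called above, offered purely as an assumption:

from enum import IntEnum

class Fork(IntEnum):
    Irrelevant = 0
    Relevant = 1
    Active = 2

def convert_state_to_number(attacker_length, honest_length, fork, max_fork_len):
    # Pack (attacker_length, honest_length, fork) into one index; both chain
    # lengths are assumed to lie in [0, max_fork_len].
    return (attacker_length * (max_fork_len + 1) + honest_length) * 3 + int(fork)

def convert_number_to_state(i, max_fork_len):
    fork = Fork(i % 3)
    i //= 3
    return i // (max_fork_len + 1), i % (max_fork_len + 1), fork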
Code example #11
def digest_block(node, data, sdef, peer_ip, db_handler):
    """node param for imports"""
    fork = Fork()
    fork2 = Fork2()

    class Transaction():
        def __init__(self):
            self.start_time_tx = 0
            self.q_received_timestamp = 0
            self.received_timestamp = "0.00"
            self.received_address = None
            self.received_recipient = None
            self.received_amount = 0
            self.received_signature_enc = None
            self.received_public_key_hashed = None
            self.received_operation = None
            self.received_openfield = None

    class MinerTransaction():
        def __init__(self):
            self.q_block_timestamp = 0
            self.nonce = None
            self.miner_address = None

    class PreviousBlock():
        def __init__(self):
            db_handler.execute(
                db_handler.c,
                "SELECT block_hash, block_height, timestamp FROM transactions WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;"
            )
            result = db_handler.c.fetchall()

            self.block_hash = result[0][0]
            self.block_height = result[0][1]
            self.q_timestamp_last = quantize_two(result[0][2])

    class BlockArray():
        def __init__(self):
            self.tx_count = 0
            self.block_height_new = node.last_block + 1  # for logging purposes.
            self.block_hash = 'N/A'
            self.failed_cause = ''
            self.block_count = 0

    block_array = BlockArray()

    def transaction_validate():
        """Validates all transaction elements. Raise a ValueError exception on error."""

        # Begin with costless checks first, so we can early exit. Time of tx
        if tx.start_time_tx < tx.q_received_timestamp:
            raise ValueError(
                f"Future transaction not allowed, timestamp {quantize_two((tx.q_received_timestamp - tx.start_time_tx) / 60)} minutes in the future"
            )
        if previous_block.q_timestamp_last - 86400 > tx.q_received_timestamp:
            raise ValueError("Transaction older than 24h not allowed.")
        # Amount
        if float(tx.received_amount) < 0:
            raise ValueError("Negative balance spend attempt")
        # Addresses validity
        if not essentials.address_validate(tx.received_address):
            raise ValueError("Not a valid sender address")
        if not essentials.address_validate(tx.received_recipient):
            raise ValueError("Not a valid recipient address")

        # Now we can process cpu heavier checks, decode and check sig itself
        # Check the sig format first
        essentials.validate_pem(tx.received_public_key_hashed)
        # Now extract the signature verifier.
        received_public_key = RSA.importKey(
            base64.b64decode(tx.received_public_key_hashed))
        received_signature_dec = base64.b64decode(tx.received_signature_enc)
        verifier = PKCS1_v1_5.new(received_public_key)
        # Build the buffer to be verified
        sha_hash = SHA.new(
            str((tx.received_timestamp, tx.received_address,
                 tx.received_recipient, tx.received_amount,
                 tx.received_operation,
                 tx.received_openfield)).encode("utf-8"))
        # Real sig check takes place here
        if not verifier.verify(sha_hash, received_signature_dec):
            raise ValueError(f"Invalid signature from {tx.received_address}")
        else:
            node.logger.app_log.info(
                f"Valid signature from {tx.received_address} to {tx.received_recipient} amount {tx.received_amount}"
            )
        # Reconstruct address from pubkey to make sure it matches
        if tx.received_address != hashlib.sha224(
                base64.b64decode(tx.received_public_key_hashed)).hexdigest():
            raise ValueError("Attempt to spend from a wrong address")

    def dev_reward():
        if int(block_array.block_height_new) % 10 == 0:  # every 10 blocks
            db_handler.dev_reward(node, block_array, miner_tx, mining_reward,
                                  mirror_hash)

    def check_signature(block):
        for entry in block:  # sig 4
            block_array.tx_count += 1
            entry_signature = entry[4]
            if entry_signature:  # prevent empty signature database retry hack
                signature_list.append(entry_signature)
                # reject block with transactions which are already in the ledger ram

                db_handler.execute_param(
                    db_handler.h,
                    "SELECT block_height FROM transactions WHERE signature = ?;",
                    (entry_signature, ))
                tx_presence_check = db_handler.h.fetchone()
                if tx_presence_check:
                    # print(node.last_block)
                    raise ValueError(
                        f"That transaction {entry_signature[:10]} is already in our ledger, block_height {tx_presence_check[0]}"
                    )

                db_handler.execute_param(
                    db_handler.c,
                    "SELECT block_height FROM transactions WHERE signature = ?;",
                    (entry_signature, ))
                tx_presence_check = db_handler.c.fetchone()
                if tx_presence_check:
                    # print(node.last_block)
                    raise ValueError(
                        f"That transaction {entry_signature[:10]} is already in our RAM ledger, block_height {tx_presence_check[0]}"
                    )
            else:
                raise ValueError(f"Empty signature from {peer_ip}")

    if node.peers.is_banned(peer_ip):
        # no need to lose any time with banned peers
        raise ValueError("Cannot accept blocks from a banned peer")
        # since we raise, it will also drop the connection, it's fine since he's banned.

    if not node.db_lock.locked():
        node.db_lock.acquire()
        node.logger.app_log.warning(f"Database lock acquired")

        while mp.MEMPOOL.lock.locked():
            time.sleep(0.1)
            node.logger.app_log.info(
                f"Chain: Waiting for mempool to unlock {peer_ip}")

        node.logger.app_log.warning(f"Chain: Digesting started from {peer_ip}")
        # variables that have been quantized are prefixed by q_ So we can avoid any unnecessary quantize again later. Takes time.
        # Variables that are only used as quantized decimal are quantized once and for all.

        block_size = Decimal(sys.getsizeof(str(data))) / Decimal(1000000)
        node.logger.app_log.warning(f"Chain: Block size: {block_size} MB")

        try:

            block_array_data = data

            # reject block with duplicate transactions
            signature_list = []
            block_transactions = []

            for block in block_array_data:

                block_array.block_count += 1

                # Reworked process: we exit as soon as we find an error, no need to process further tests.
                # Then the exception handler takes place.

                # TODO EGG: benchmark this loop vs a single "WHERE IN" SQL
                # move down, so bad format tx do not require sql query

                check_signature(block)

                block_array.tx_count = len(signature_list)
                if block_array.tx_count != len(set(signature_list)):
                    raise ValueError(
                        "There are duplicate transactions in this block, rejected"
                    )

                del signature_list[:]

                previous_block = PreviousBlock()

                block_array.block_height_new = previous_block.block_height + 1

                db_handler.execute(
                    db_handler.c, "SELECT max(block_height) FROM transactions")
                node.last_block = db_handler.c.fetchone()[0]

                start_time_block = quantize_two(time.time())
                transaction_list_converted = []  # makes sure all the data are properly converted

                for tx_index, transaction in enumerate(block):
                    tx = Transaction()

                    tx.start_time_tx = quantize_two(time.time())
                    tx.q_received_timestamp = quantize_two(transaction[0])
                    tx.received_timestamp = '%.2f' % tx.q_received_timestamp
                    tx.received_address = str(transaction[1])[:56]
                    tx.received_recipient = str(transaction[2])[:56]
                    tx.received_amount = '%.8f' % (quantize_eight(
                        transaction[3]))
                    tx.received_signature_enc = str(transaction[4])[:684]
                    tx.received_public_key_hashed = str(transaction[5])[:1068]
                    tx.received_operation = str(transaction[6])[:30]
                    tx.received_openfield = str(transaction[7])[:100000]

                    # if transaction == block[-1]:
                    if tx_index == block_array.tx_count - 1:  # faster than comparing the whole tx
                        miner_tx = MinerTransaction()

                        # recognize the last transaction as the mining reward transaction
                        miner_tx.q_block_timestamp = tx.q_received_timestamp
                        miner_tx.nonce = tx.received_openfield[:128]
                        miner_tx.miner_address = tx.received_address

                    transaction_list_converted.append(
                        (tx.received_timestamp, tx.received_address,
                         tx.received_recipient, tx.received_amount,
                         tx.received_signature_enc,
                         tx.received_public_key_hashed, tx.received_operation,
                         tx.received_openfield))

                    # if (start_time_tx < q_received_timestamp + 432000) or not quicksync:

                    # convert readable key to instance

                    transaction_validate()

                # reject blocks older than latest block
                if miner_tx.q_block_timestamp <= previous_block.q_timestamp_last:
                    raise ValueError(
                        "Block is older than the previous one, will be rejected"
                    )

                # calculate current difficulty (is done for each block in block array, not super easy to isolate)
                diff = difficulty(node, db_handler)
                node.difficulty = diff

                node.logger.app_log.warning(
                    f"Time to generate block {previous_block.block_height + 1}: {'%.2f' % diff[2]}"
                )
                node.logger.app_log.warning(f"Current difficulty: {diff[3]}")
                node.logger.app_log.warning(f"Current blocktime: {diff[4]}")
                node.logger.app_log.warning(f"Current hashrate: {diff[5]}")
                node.logger.app_log.warning(
                    f"Difficulty adjustment: {diff[6]}")
                node.logger.app_log.warning(f"Difficulty: {diff[0]} {diff[1]}")

                # node.logger.app_log.info("Transaction list: {}".format(transaction_list_converted))
                block_array.block_hash = hashlib.sha224(
                    (str(transaction_list_converted) +
                     previous_block.block_hash).encode("utf-8")).hexdigest()
                # node.logger.app_log.info("Last block sha_hash: {}".format(block_hash))
                node.logger.app_log.info(
                    f"Calculated block sha_hash: {block_array.block_hash}")
                # node.logger.app_log.info("Nonce: {}".format(nonce))

                # check if we already have the sha_hash
                db_handler.execute_param(
                    db_handler.h,
                    "SELECT block_height FROM transactions WHERE block_hash = ?",
                    (block_array.block_hash, ))
                dummy = db_handler.h.fetchone()
                if dummy:
                    raise ValueError(
                        "Skipping digestion of block {} from {}, because we already have it on block_height {}"
                        .format(block_array.block_hash[:10], peer_ip,
                                dummy[0]))

                if node.is_mainnet:
                    if block_array.block_height_new < fork.POW_FORK:
                        diff_save = mining.check_block(
                            block_array.block_height_new,
                            miner_tx.miner_address,
                            miner_tx.nonce,
                            previous_block.block_hash,
                            diff[0],
                            tx.received_timestamp,
                            tx.q_received_timestamp,
                            previous_block.q_timestamp_last,
                            peer_ip=peer_ip,
                            app_log=node.logger.app_log)
                    else:
                        diff_save = mining_heavy3.check_block(
                            block_array.block_height_new,
                            miner_tx.miner_address,
                            miner_tx.nonce,
                            previous_block.block_hash,
                            diff[0],
                            tx.received_timestamp,
                            tx.q_received_timestamp,
                            previous_block.q_timestamp_last,
                            peer_ip=peer_ip,
                            app_log=node.logger.app_log)
                elif node.is_testnet:
                    diff_save = mining_heavy3.check_block(
                        block_array.block_height_new,
                        miner_tx.miner_address,
                        miner_tx.nonce,
                        previous_block.block_hash,
                        diff[0],
                        tx.received_timestamp,
                        tx.q_received_timestamp,
                        previous_block.q_timestamp_last,
                        peer_ip=peer_ip,
                        app_log=node.logger.app_log)
                else:
                    # it's regnet then, will use a specific fake method here.
                    diff_save = mining_heavy3.check_block(
                        block_array.block_height_new,
                        miner_tx.miner_address,
                        miner_tx.nonce,
                        previous_block.block_hash,
                        regnet.REGNET_DIFF,
                        tx.received_timestamp,
                        tx.q_received_timestamp,
                        previous_block.q_timestamp_last,
                        peer_ip=peer_ip,
                        app_log=node.logger.app_log)

                fees_block = []
                mining_reward = 0  # avoid warning

                # Cache for multiple tx from same address
                balances = {}
                for tx_index, transaction in enumerate(block):
                    db_timestamp = '%.2f' % quantize_two(transaction[0])
                    db_address = str(transaction[1])[:56]
                    db_recipient = str(transaction[2])[:56]
                    db_amount = '%.8f' % quantize_eight(transaction[3])
                    db_signature = str(transaction[4])[:684]
                    db_public_key_hashed = str(transaction[5])[:1068]
                    db_operation = str(transaction[6])[:30]
                    db_openfield = str(transaction[7])[:100000]

                    block_debit_address = 0
                    block_fees_address = 0

                    # this also is redundant on many tx per address block
                    for x in block:
                        if x[1] == db_address:  # make calculation relevant to a particular address in the block
                            block_debit_address = quantize_eight(
                                Decimal(block_debit_address) + Decimal(x[3]))

                            if x != block[-1]:
                                block_fees_address = quantize_eight(
                                    Decimal(block_fees_address) + Decimal(
                                        essentials.fee_calculate(
                                            db_openfield, db_operation,
                                            node.last_block))
                                )  # exclude the mining tx from fees

                    # print("block_fees_address", block_fees_address, "for", db_address)
                    # node.logger.app_log.info("Digest: Inbound block credit: " + str(block_credit))
                    # node.logger.app_log.info("Digest: Inbound block debit: " + str(block_debit))
                    # include the new block

                    # if (start_time_tx < q_received_timestamp + 432000) and not quicksync:
                    # balance_pre = quantize_eight(credit_ledger - debit_ledger - fees + rewards)  # without projection
                    balance_pre = ledger_balance3(
                        db_address, balances,
                        db_handler)  # keep this as c (ram hyperblock access)

                    # balance = quantize_eight(credit - debit - fees + rewards)
                    balance = quantize_eight(balance_pre - block_debit_address)
                    # node.logger.app_log.info("Digest: Projected transaction address balance: " + str(balance))
                    # else:
                    #    print("hyp2")

                    fee = essentials.fee_calculate(db_openfield, db_operation,
                                                   node.last_block)

                    fees_block.append(quantize_eight(fee))
                    # node.logger.app_log.info("Fee: " + str(fee))

                    # decide reward
                    if tx_index == block_array.tx_count - 1:
                        db_amount = 0  # prevent spending from another address, because mining txs allow delegation
                        if previous_block.block_height <= 10000000:
                            mining_reward = 15 - (
                                quantize_eight(block_array.block_height_new) /
                                quantize_eight(1000000 / 2)) - Decimal("0.8")
                            if mining_reward < 0:
                                mining_reward = 0
                        else:
                            mining_reward = 0

                        reward = quantize_eight(mining_reward +
                                                sum(fees_block[:-1]))
                        # don't request a fee for mined block so new accounts can mine
                        fee = 0
                    else:
                        reward = 0

                    if quantize_eight(balance_pre) < quantize_eight(db_amount):
                        raise ValueError(
                            f"{db_address} sending more than owned: {db_amount}/{balance_pre}"
                        )

                    if quantize_eight(balance) - quantize_eight(
                            block_fees_address) < 0:
                        # exclude fee check for the mining/header tx
                        raise ValueError(
                            f"{db_address} Cannot afford to pay fees (balance: {balance}, block fees: {block_fees_address})"
                        )

                    # append, but do not insert to ledger before whole block is validated, note that it takes already validated values (decimals, length)
                    node.logger.app_log.info(
                        f"Chain: Appending transaction back to block with {len(block_transactions)} transactions in it"
                    )
                    block_transactions.append(
                        (str(block_array.block_height_new), str(db_timestamp),
                         str(db_address), str(db_recipient), str(db_amount),
                         str(db_signature), str(db_public_key_hashed),
                         str(block_array.block_hash), str(fee), str(reward),
                         str(db_operation), str(db_openfield)))

                    try:
                        mp.MEMPOOL.delete_transaction(db_signature)
                        node.logger.app_log.info(
                            f"Chain: Removed processed transaction {db_signature[:56]} from the mempool while digesting"
                        )
                    except:
                        # tx was not or is no more in the local mempool
                        pass
                # end for block

                # save current diff (before the new block)

                # quantized vars have to be converted, since Decimal is not json serializable...
                node.plugin_manager.execute_action_hook(
                    'block', {
                        'height': block_array.block_height_new,
                        'diff': diff_save,
                        'sha_hash': block_array.block_hash,
                        'timestamp': float(miner_tx.q_block_timestamp),
                        'miner': miner_tx.miner_address,
                        'ip': peer_ip
                    })

                node.plugin_manager.execute_action_hook(
                    'fullblock', {
                        'height': block_array.block_height_new,
                        'diff': diff_save,
                        'sha_hash': block_array.block_hash,
                        'timestamp': float(miner_tx.q_block_timestamp),
                        'miner': miner_tx.miner_address,
                        'ip': peer_ip,
                        'transactions': block_transactions
                    })

                db_handler.execute_param(
                    db_handler.c, "INSERT INTO misc VALUES (?, ?)",
                    (block_array.block_height_new, diff_save))
                db_handler.commit(db_handler.conn)

                #db_handler.execute_many(db_handler.c, "INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)", block_transactions)

                for transaction2 in block_transactions:
                    db_handler.execute_param(
                        db_handler.c,
                        "INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
                        (str(transaction2[0]), str(transaction2[1]),
                         str(transaction2[2]), str(transaction2[3]),
                         str(transaction2[4]), str(transaction2[5]),
                         str(transaction2[6]), str(transaction2[7]),
                         str(transaction2[8]), str(transaction2[9]),
                         str(transaction2[10]), str(transaction2[11])))
                    # secure commit for slow nodes
                    db_handler.commit(db_handler.conn)

                # savings
                if node.is_testnet or block_array.block_height_new >= 843000:
                    # no savings for regnet
                    if int(block_array.block_height_new) % 10000 == 0:  # every x blocks

                        staking.staking_update(db_handler.conn, db_handler.c,
                                               db_handler.index,
                                               db_handler.index_cursor,
                                               "normal",
                                               block_array.block_height_new,
                                               node.logger.app_log)
                        staking.staking_payout(
                            db_handler.conn, db_handler.c, db_handler.index,
                            db_handler.index_cursor,
                            block_array.block_height_new,
                            float(miner_tx.q_block_timestamp),
                            node.logger.app_log)
                        staking.staking_revalidate(
                            db_handler.conn, db_handler.c, db_handler.index,
                            db_handler.index_cursor,
                            block_array.block_height_new, node.logger.app_log)

                # new sha_hash
                db_handler.execute(
                    db_handler.c,
                    "SELECT * FROM transactions WHERE block_height = (SELECT max(block_height) FROM transactions)"
                )
                # Was trying to simplify, but it's the latest mirror sha_hash, not the latest block, nor the mirror of the latest block.
                # c.execute("SELECT * FROM transactions WHERE block_height = ?", (block_array.block_height_new -1,))
                tx_list_to_hash = db_handler.c.fetchall()
                mirror_hash = hashlib.blake2b(str(tx_list_to_hash).encode(),
                                              digest_size=20).hexdigest()
                # /new sha_hash

                dev_reward()

                # node.logger.app_log.warning("Block: {}: {} valid and saved from {}".format(block_array.block_height_new, block_hash[:10], peer_ip))
                node.logger.app_log.warning(
                    f"Valid block: {block_array.block_height_new}: {block_array.block_hash[:10]} with {len(block)} txs, digestion from {peer_ip} completed in {str(time.time() - float(start_time_block))[:5]}s."
                )

                del block_transactions[:]
                node.peers.unban(peer_ip)

                # This new block may change the int(diff). Trigger the hook whether it changed or not.
                diff = difficulty(node, db_handler)
                node.difficulty = diff
                node.plugin_manager.execute_action_hook('diff', diff[0])
                # We could recalc diff after inserting block, and then only trigger the block hook, but I fear this would delay the new block event.

                # /whole block validation
                # NEW: returns new block sha_hash

            checkpoint_set(node, block_array.block_height_new)
            return block_array.block_hash

        except Exception as e:
            node.logger.app_log.warning(f"Chain processing failed: {e}")

            node.logger.app_log.info(f"Received data dump: {data}")

            block_array.failed_cause = str(e)
            # Temp

            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)

            if node.peers.warning(sdef, peer_ip, "Rejected block", 2):
                raise ValueError(f"{peer_ip} banned")
            raise ValueError("Chain: digestion aborted")

        finally:

            if node.ram:
                db_to_drive(node, db_handler)

            node.db_lock.release()
            node.logger.app_log.warning(f"Database lock released")

            delta_t = time.time() - float(start_time_block)
            # node.logger.app_log.warning("Block: {}: {} digestion completed in {}s.".format(block_array.block_height_new,  block_hash[:10], delta_t))
            node.plugin_manager.execute_action_hook(
                'digestblock', {
                    'failed': block_array.failed_cause,
                    'ip': peer_ip,
                    'deltat': delta_t,
                    "blocks": block_array.block_count,
                    "txs": block_array.tx_count
                })

    else:
        node.logger.app_log.warning(
            f"Chain: Skipping processing from {peer_ip}, someone delivered data faster"
        )
        node.plugin_manager.execute_action_hook('digestblock', {
            'failed': "skipped",
            'ip': peer_ip
        })
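Two of the integrity checks inside digest_block reduce to a few lines of hashing and are easy to test in isolation. A standalone sketch mirroring them (the helper names are illustrative, not from the project):

import base64
import hashlib

def address_from_pubkey(public_key_b64: str) -> str:
    # As in transaction_validate above: a sender address must equal the sha224
    # hex digest of the base64-decoded public key.
    return hashlib.sha224(base64.b64decode(public_key_b64)).hexdigest()

def compute_block_hash(transaction_list_converted, previous_block_hash: str) -> str:
    # As in the block digestion loop: the block hash chains the stringified
    # transaction list to the previous block hash.
    return hashlib.sha224(
        (str(transaction_list_converted) + previous_block_hash).encode("utf-8")).hexdigest()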