def on_genesis(self, chain):
    """Run once for the genesis block: build the StateMaker, register the
    demo Counter dapp, and bring up RPC.

    Must only run on an uninitialized chain.
    """
    assert not chain.initialized
    self.state_maker = StateMaker(chain)
    self.super_state = self.state_maker.super_state
    debug('Block.on_genesis called')

    class Counter(Dapp):
        # Toy dapp: extends a Collatz-style sequence keyed by block height.
        def on_block(self, block, chain):
            if block.height > 0:
                last_value = self.state[block.height - 1]
                if last_value > 1:
                    if last_value % 2 == 0:
                        self.state[block.height] = last_value // 2
                    else:
                        self.state[block.height] = 3 * last_value + 1
                else:
                    # sequence exhausted (reached 1 or unset); restart at the height itself
                    self.state[block.height] = block.height
            debug('Counter: on_block called.', self.state.key_value_store)

        def on_transaction(self, subtx, block, chain):
            # Counter ignores transactions entirely.
            pass

    # b'' would collide with the root dapp name elsewhere — presumably
    # intentional here; TODO confirm registration key.
    self.state_maker.register_dapp(Counter(b'', self.state_maker))
    self.setup_rpc()
def on_transaction(self, tx, block, chain):
    '''
    Process a transaction.
    tx has following info (subject to change):
    tx.value, tx.fee, tx.data, tx.sender, tx.dapp

    tx.value >= 0 not >0 so 0 value txs (like notification of new foreign
    blocks) can be free.

    Debits value+fee+donation from the sender, credits the fee and donation
    accounts, then routes the value either to a raw recipient (root dapp)
    or to a registered dapp's balance and handler.
    '''
    self.assert_true(tx.value >= 0, 'tx.value must be greater than or equal to 0')
    self.assert_true(tx.fee >= 0, 'tx.fee must be greater than or equal to 0')
    self.assert_true(tx.donation >= 0, 'tx.donation must be greater than or equal to 0')
    debug('TxPrism.on_transaction', tx.sender, tx.value)
    self.assert_true(self.state[tx.sender] >= tx.value + tx.fee + tx.donation, 'sender must have enough funds')
    self.state[tx.sender] -= tx.value + tx.fee + tx.donation
    self.state[TxPrism.KNOWN_PUBKEY_X] += tx.fee
    self.state[TxPrism.EUDEMONIA_PUBKEY_X] += tx.donation
    if tx.dapp == b'':
        # root dapp: tx.data holds exactly one raw recipient pubkey_x
        self.assert_true(len(tx.data) == 1, 'Only one recipient allowed when sending to root dapp')
        # NOTE(review): the check allows 33 bytes but the message says 32 —
        # one of the two is wrong; verify intended pubkey_x width.
        self.assert_true(len(tx.data[0]) <= 33, 'recipient must be <= 32 bytes long (should be pubkey_x)')
        recipient = tx.data[0]
        self.state[recipient] += tx.value
    else:
        self.assert_true(tx.dapp in self.state_maker.dapps, 'dapp must be known')
        self.state[tx.dapp] += tx.value
        self.state_maker.dapps[tx.dapp].on_transaction(tx, block, chain)
    self.state.recursively_print_state()
    debug('TxPrism.on_tx: super_state hash', self.super_state.get_hash())
def add_block(self, block, skip_db=False):
    ''' returns True on success

    Adds block to all in-memory indexes (and the db unless skip_db),
    validates it, and promotes it to head if it beats the current head.
    Returns None (not False) for duplicates and known-invalid blocks.
    '''
    if self.has_block(block):
        return
    # descendant of an invalid block is itself invalid
    if block.get_hash() in self.invalid_block_hashes or block.parent_hash in self.invalid_block_hashes:
        debug('Chain: add_block: invalid block: #%d, %064x' % (block.priority, block.get_hash()))
        self.invalid_block_hashes.add(block.get_hash())
        return
    if not skip_db:
        self.db.set_entry(block.get_hash(), block)
        self.db.set_ancestors(block)
    self.blocks.add(block)
    self.block_index[block.get_hash()] = block
    self.block_hashes.add(block.get_hash())
    # queue is min-ordered, so store inverse priority: higher priority -> smaller key
    self.block_hashes_with_priority.put((1 / (1 + block.priority), block.get_hash()))
    # NOTE(review): validation happens after the block is indexed; a raise
    # here leaves the indexes populated — confirm callers handle that.
    block.assert_validity(self)
    if block.better_than(self.head):
        self.set_head(block)
    debug('added block %d, hash: %064x' % (block.priority, block.get_hash()))
    self.restart_miner()
    return True
def block_reseeker(self):
    '''
    1. Are there any blocks that were requested more than X seconds ago
    1.1 throw it away if it is no longer in the self.present set
    1.2 for each block that was, add it to requesting

    Coroutine: periodically re-requests blocks whose earlier request
    apparently went unanswered (>10s old), batching them into one
    b'request_blocks' broadcast.
    '''
    requesting = IntList()
    while not self._shutdown:
        # budget for new requests this pass
        to_get = min(len(self.future), self.max_blocks_at_once()) - requesting.len()
        oldest_timestamp, oldest_block_hash = yield from self.present_queue.get()
        if oldest_timestamp + 10 < time.time() and to_get > 0:  # requested >10s ago
            if oldest_block_hash in self.present:
                if self.chain.has_block_hash(oldest_block_hash):
                    # arrived in the meantime: stop tracking it
                    self.present.remove(oldest_block_hash)
                else:
                    debug('seeker, block re-request: %064x' % oldest_block_hash)
                    requesting.append(oldest_block_hash)
        else:
            yield from self.present_queue.put((oldest_timestamp, oldest_block_hash))  # put hash back in the queue
        if len(requesting) > 0:
            self.p2p.broadcast(b'request_blocks', requesting.serialize())
            # re-enter each re-requested hash with a fresh timestamp
            for hash in requesting:
                self.present_queue.put_nowait((time.time(), hash))
            requesting = IntList()
        yield from asyncio.sleep(0.5)
def reorganisation(self, chain, from_block, around_block, to_block, is_test=False):
    ''' self.reorganisation() should be called on current head, where to_block is
    to become the new head of the chain.

    Steps:
    10. From around_block find the prune point
    15. Generate the chain_path_to_trial
    20. Conduct Trial
    30. Return result
    40. Mark trial head as invalid if the trial failed.
    '''
    assert not self.is_future
    debug('StateMaker.reorg: around_block.get_hash(): %064x' % around_block.get_hash())
    around_state_height = self.find_prune_point(around_block.height)
    debug('StateMaker.reorganisation: around_state_height: %d' % around_state_height)
    # NOTE(review): path starts from the block at the prune height; the
    # sibling variant of this method starts from around_block itself and
    # does not pop — confirm which is current.
    chain_path_to_trial = chain.construct_chain_path(chain.block_height_to_hash[around_state_height], to_block.get_hash())
    if len(chain_path_to_trial) > 0:
        chain_path_to_trial.pop(0)  # this might not be needed actually
    if is_test:
        success = self.trial_chain_path_non_permanent(around_state_height, chain_path_to_trial)
    else:
        success = self.trial_chain_path(around_state_height, chain_path_to_trial)
    if not success and not is_test:
        # poison the failed head (and its descendants)
        chain.recursively_mark_invalid(chain_path_to_trial[-1].get_hash())
    if not is_test and success:
        self._refresh_future_block(to_block)
        self.most_recent_block = to_block
    return success
def generate_super_state(self):
    """Assemble and return a fresh SuperState containing every registered
    dapp's state, keyed by the dapp's own name."""
    debug('Generating super state')
    assembled = SuperState()
    for dapp_key in self.dapps:
        entry = self.dapps[dapp_key]
        assembled.register_dapp(entry.name, entry.state)
    return assembled
def block_reseeker(self):
    '''
    1. Are there any blocks that were requested more than X seconds ago
    1.1 throw it away if it is no longer in the self.present set
    1.2 for each block that was, add it to requesting

    Coroutine: re-broadcasts requests for blocks whose original request is
    more than 10 seconds old and still unanswered.
    '''
    requesting = IntList()
    while not self._shutdown:
        # how many more hashes we may request right now
        to_get = min(len(self.future), self.max_blocks_at_once()) - requesting.len()
        oldest_timestamp, oldest_block_hash = yield from self.present_queue.get()
        if oldest_timestamp + 10 < time.time() and to_get > 0:  # requested >10s ago
            if oldest_block_hash in self.present:
                if self.chain.has_block_hash(oldest_block_hash):
                    # block arrived since the request: drop tracking
                    self.present.remove(oldest_block_hash)
                else:
                    debug('seeker, block re-request: %064x' % oldest_block_hash)
                    requesting.append(oldest_block_hash)
        else:
            yield from self.present_queue.put((oldest_timestamp, oldest_block_hash))  # put hash back in the queue
        if len(requesting) > 0:
            self.p2p.broadcast(b'request_blocks', requesting.serialize())
            for hash in requesting:
                # refreshed timestamp so the 10s timer restarts
                self.present_queue.put_nowait((time.time(), hash))
            requesting = IntList()
        yield from asyncio.sleep(0.5)
def block_seeker(self):
    '''
    block_seeker() is run through asyncio. block_seeker will run in a loop and:
    2. Find the number of blocks to send
    2.1 get that many from the future_queue
    3. For each block_hash to request, add it to the present_queue with the
       time it was requested.
    4. Pick a random peer and send the request to it.
    '''
    # wait until the chain is initialized before requesting anything
    while not self._shutdown and not self.chain.initialized:
        yield from asyncio.sleep(0.1)
    while not self._shutdown:
        requesting = IntList()  # encodium object
        to_get = min(len(self.future), self.max_blocks_at_once())
        if to_get > 0:
            # pick some blocks to request
            for i in range(to_get):
                priority, h = self.future_queue.get_nowait()
                #print('block_seeker: asking for height: ',_)
                self.future.remove(h)
                if priority != 0:  # note: why exclude priority 0? genesis block?
                    requesting.append(h)
            # track outstanding requests with their request time
            for h in requesting:
                self.present_queue.put_nowait((int(time.time()), h))
                self.present.add(h)
            if requesting.len() > 0:
                # TODO : don't broadcast to all nodes, just one
                debug('Requesting: ', requesting.serialize(), len(self.future))
                self.p2p.broadcast(b'request_blocks', requesting.serialize())
        else:
            yield from asyncio.sleep(0.01)
def assert_validity(self, chain):
    ''' self.assert_validity should validate the following:
    * self.header.state_mr equals root of self.super_state

    Also checks internal consistency, header validity, known parent and
    height continuity; genesis blocks instead require zeroed fields.
    Raises via self.assert_true on failure.
    '''
    self.assert_internal_consistency()
    self.header.assert_validity(chain)
    if self.state_maker is None:
        # inherit the head's state maker when this block has none yet
        self._set_state_maker(chain.head.state_maker)
    if chain.initialized:
        debug(self)
        self.assert_true(chain.has_block_hash(self.parent_hash), 'Parent must be known')
        self.assert_true(chain.get_block(self.parent_hash).height + 1 == self.height, 'Height requirement')
        self.assert_true(self.super_state.get_hash() == self.header.state_mr, 'State root must match expected')
    else:
        # chain not initialized: this must be the genesis block
        self.assert_true(self.height == 0, 'Genesis req.: height must be 0')
        self.assert_true(self.parent_hash == 0, 'Genesis req.: parent_hash must be zeroed')
        self.assert_true(self.header.state_mr == 0, 'Genesis req.: state_mr zeroed')
def reorganisation(self, chain, from_block, around_block, to_block, is_test=False):
    ''' self.reorganisation() should be called on current head, where to_block is
    to become the new head of the chain.

    Steps:
    10. From around_block find the prune point
    15. Generate the chain_path_to_trial
    20. Conduct Trial
    30. Return result
    40. Mark trial head as invalid if the trial failed.
    '''
    assert not self.is_future
    debug('StateMaker.reorg: around_block.get_hash(): %064x' % around_block.get_hash())
    around_state_height = self.find_prune_point(around_block.height)
    debug('StateMaker.reorganisation: around_state_height: %d' % around_state_height)
    # NOTE(review): path starts from around_block here; a sibling variant
    # starts from the block at the prune height and pops the first entry —
    # confirm which is current.
    chain_path_to_trial = chain.construct_chain_path(around_block.get_hash(), to_block.get_hash())
    if is_test:
        success = self.trial_chain_path_non_permanent(around_state_height, chain_path_to_trial)
    else:
        success = self.trial_chain_path(around_state_height, chain_path_to_trial)
    if not success and not is_test:
        # failed trial: mark the attempted head (and descendants) invalid
        chain.recursively_mark_invalid(chain_path_to_trial[-1].get_hash())
    if not is_test and success:
        self._refresh_future_block(to_block)
        self.most_recent_block = to_block
    return success
def apply_block(self, block, hard_checkpoint=True):
    """Apply block's events to the current state and checkpoint afterwards.

    The root dapp's state height must already equal the block's height.
    Genesis (height 0) is applied without a trailing checkpoint.
    """
    root_state = self.dapps[ROOT_DAPP].state
    debug('StateMaker.apply_block, heights:', block.height, root_state.height)
    assert block.height == root_state.height
    self._block_events(block)
    block.assert_validity(self.chain)
    if block.height != 0:
        self.checkpoint(hard_checkpoint)
def on_genesis(self, chain):
    """Run once for the genesis block: build the StateMaker, register the
    demo Counter dapp, and bring up RPC.

    Must only run on an uninitialized chain.
    """
    assert not chain.initialized
    self.state_maker = StateMaker(chain)
    self.super_state = self.state_maker.super_state
    debug('Block.on_genesis called')

    class Counter(Dapp):
        # Toy dapp: extends a Collatz-style sequence keyed by block height.
        def on_block(self, block, chain):
            if block.height > 0:
                last_value = self.state[block.height - 1]
                if last_value > 1:
                    if last_value % 2 == 0:
                        self.state[block.height] = last_value // 2
                    else:
                        self.state[block.height] = 3 * last_value + 1
                else:
                    # sequence exhausted (reached 1 or unset); restart at the height itself
                    self.state[block.height] = block.height
            debug('Counter: on_block called.', self.state.key_value_store)
            self.state.recursively_print_state()

        def on_transaction(self, subtx, block, chain):
            # Counter ignores transactions entirely.
            pass

    self.state_maker.register_dapp(Counter(b'', self.state_maker))
    self.setup_rpc()
def update_roots(self, update_state_mr=True, update_tx_mr=True):
    """Recompute the header's state and/or transaction merkle roots.

    Genesis (height 0) is skipped entirely — its roots stay zeroed.
    """
    if self.height == 0:
        return
    debug('UPDATE_ROOTS')
    if update_state_mr:
        self.header.state_mr = self.state_maker.super_state.get_hash()
    if update_tx_mr:
        tx_hashes = [stx.get_hash() for stx in self.super_txs]
        self.header.transaction_mr = MerkleLeavesToRoot(leaves=tx_hashes).get_hash()
def make_genesis():
    """Construct, mine, and dump a genesis block for the min_net chain.

    Relies on module-level min_net; the mined block is only printed via
    debug — persisting it is up to the caller/operator.
    """
    genesis_block = MinBlockWithState.make(parent_hash=0, height=0)
    genesis_block._set_state_maker(StateMaker(min_net.chain, MinBlockWithState))
    genesis_block.update_state_root()
    miner = Miner(min_net.chain, min_net.seek_n_build)
    # blocks until a valid proof-of-work is found
    miner.mine(genesis_block)
    debug('Genesis Block: ', genesis_block.serialize())
def chain_builder(self):
    '''
    1. Get the next block.
    2. If the block is potentially the next block (or older that the chain head)
    2.1 Check we don't already have it
    2.2 Ensure it's valid
    2.3 Add it to the Chain
    2.4 Broadcast to peers

    Worker loop: drains past_queue (ordered by height), defers blocks that
    are too far ahead or parentless, and adds/broadcasts the rest.
    '''
    while not self._shutdown and not self.chain.initialized:
        time.sleep(0.1)
    while not self._shutdown:
        try:
            with self.past_lock:
                height, nonce, block = self.past_queue.get(timeout=0.1)
        except queue.Empty:
            continue
        if block.height == 0:
            # genesis never goes through add_block here
            self.past.remove(block.get_hash())
            self.done.add(block.get_hash())
            continue
        block_hash = block.get_hash()
        #print('chain_builder: checking %d' % block.height)
        # TODO : handle orphans intelligently
        if block.height > self.get_chain_height() + 1:
            #print('chain_builder: chain height: %d' % self.get_chain_height())
            #print('chain_builder: block.height %d' % block.height)
            # try some of those which were parentless:
            with self.past_lock:
                self.past_queue.put((height, nonce, block))
                while not self.past_queue_no_parent.empty():
                    self.past_queue.put(self.past_queue_no_parent.get())
            time.sleep(0.05)
        else:
            if self.chain.has_block(block_hash):
                try:
                    self.past.remove(block_hash)
                except KeyError:
                    pass
                self.done.add(block_hash)
                continue
            # TODO : handle orphans intelligently
            if not self.chain.has_block_hash(block.parent_hash):
                debug('chain_builder: don\'t have parent')
                debug('chain_builder: head and curr', self.chain.head.get_hash(), block.parent_hash)
                # park it until a parent arrives
                with self.past_lock:
                    self.past_queue_no_parent.put((height, nonce, block))
                continue
            # todo: only broadcast block on success
            with self.past_lock:
                self.past.remove(block_hash)
                self.done.add(block_hash)
            self.chain.add_block(block)
            debug('builder to send : %064x' % block.get_hash())
            to_send = BlocksMessage(contents=[block.serialize()])
            debug('builder sending...')
            verbose_debug('builder to send full : %s' % to_send.serialize())
            self.broadcast_block(to_send)
            debug('builder success : %064x' % block.get_hash())
def apply_block(self, block, hard_checkpoint=True):
    """Apply block's events to the current state and checkpoint afterwards.

    The root dapp's state height must already equal the block's height;
    genesis (height 0) skips the trailing checkpoint.
    """
    debug('StateMaker.apply_block, heights:', block.height, self.dapps[ROOT_DAPP].state.height)
    self.dapps[ROOT_DAPP].state.recursively_print_state()
    assert block.height == self.dapps[ROOT_DAPP].state.height
    self._block_events(block)
    block.assert_validity(self.chain)
    if block.height != 0:
        self.checkpoint(hard_checkpoint)
def add_super_tx(self, super_tx):
    """Append super_tx to this block, apply it to the future state, and
    refresh the merkle roots.

    A coinbase tx replaces any existing coinbase rather than stacking.
    """
    #assert self.state_maker.get_height() < self.height # only add txs to future blocks
    debug('Adding stx:', super_tx)
    if super_tx.coinbase:
        # only one coinbase per block: drop the old one first
        self.remove_coinbase_transaction()
    self.super_txs.append(super_tx)
    self.state_maker.apply_super_tx_to_future(super_tx)
    self.update_roots()
def update_roots(self, update_state_mr=True, update_tx_mr=True):
    """Recompute the header's state and/or transaction merkle roots.

    Skipped for genesis (height 0), whose roots stay zeroed.
    """
    if self.height != 0:
        debug('UPDATE_ROOTS')
        if update_state_mr:
            self.header.state_mr = self.state_maker.super_state.get_hash()
        if update_tx_mr:
            # merkle root over the hashes of all super transactions, in order
            self.header.transaction_mr = MerkleLeavesToRoot(leaves=[i.get_hash() for i in self.super_txs]).get_hash()
def _mark_invalid(self, invalid_block_hash):
    """Record invalid_block_hash as invalid and, if it had been accepted,
    purge it from the live block collections."""
    debug('Chain: Marking %064x as invalid' % invalid_block_hash)
    self.invalid_block_hashes.add(invalid_block_hash)
    if invalid_block_hash not in self.block_hashes:
        return
    doomed = self.get_block(invalid_block_hash)
    self.block_hashes.remove(invalid_block_hash)
    self.blocks.remove(doomed)
    self.block_hashes_with_priority.add_to_invalid_set((doomed.priority, invalid_block_hash))
def _construct_best_chain(self):
    '''
    Find best block not in invalid_block_hashes.
    Run a reorg from head to that block.
    '''
    block_hash = None  # NOTE(review): dead assignment, immediately overwritten
    # peek: take the best entry then put it straight back
    inverse_priority, block_hash = self.block_hashes_with_priority.get()
    self.block_hashes_with_priority.put((inverse_priority, block_hash))
    # inverse priority was stored as 1/(1+priority); invert for display
    debug('_construct_best_chain: priority, best_block: %d, %064x' % (1 / inverse_priority - 1, block_hash))
    self.set_head(self.get_block(block_hash))
def find_prune_point(self, max_prune_height):
    """Return the greatest checkpoint height at or below max_prune_height.

    Walks up the StateDelta parent chain until a delta no taller than the
    limit is found (iterative form of the ancestor search).
    """
    assert max_prune_height >= 0
    node = self
    while node.height > max_prune_height:
        node = node.parent
    debug('StateDelta: find_prune_point: returning %d' % node.height)
    return node.height
def request_blocks_handler(node, requests):
    """p2p handler: reply with every requested block we have, serialized
    into a BytesList sent as a b'blocks' message.

    Duplicates are served once (set); unknown hashes are skipped silently;
    nothing is sent when no blocks matched.
    """
    if config['network_debug'] or True:  # `or True`: logging forced on
        debug('MSG request_blocks : %064x' % requests.get_hash())
    blocks_to_send = BytesList()
    for bh in set(requests.contents):
        if self.chain.has_block_hash(bh):
            blocks_to_send.append(self.chain.get_block(bh).serialize())
    if blocks_to_send.len() > 0:
        node.send(b'blocks', blocks_to_send)
def request_blocks_handler(node, requests):
    """p2p handler: reply with every requested block we have, serialized
    into a BytesList sent as a b'blocks' message.

    Duplicate hashes are served only once; unknown hashes are skipped
    silently; nothing is sent when no blocks matched.
    """
    if config['network_debug'] or True:  # `or True`: logging forced on
        debug('MSG request_blocks : %064x' % requests.get_hash())
    blocks_to_send = BytesList()
    for bh in set(requests):  # dedupe: don't serialize the same block twice
        if self.chain.has_block_hash(bh):
            blocks_to_send.append(self.chain.get_block(bh).serialize())
    if blocks_to_send.len() > 0:
        # topic must be bytes, matching the rest of the wire protocol
        # (b'request_blocks', b'blocks' elsewhere) — was the str 'blocks'
        node.send(b'blocks', blocks_to_send)
def assert_valid_signature(self, message): ''' ''' # TODO assert pubkey is valid for curve if not pycoin.ecdsa.verify(pycoin.ecdsa.generator_secp256k1, (self.pubkey_x, self.pubkey_y), global_hash(message), (self.r, self.s)): raise ValidationError('Signature failed to verify') debug('Signature.assert_valid_signature', message)
def checkpoint(self, hard_checkpoint=True):
    """Create and return the next StateDelta at height + 1.

    With hard_checkpoint this delta is hardened against the new child,
    which may merge some ancestors.
    """
    debug('StateDelta: checkpoint')
    child = StateDelta(self, self.height + 1)
    if hard_checkpoint:
        self.harden(child)
    return child
def _trial_chain_path(self, around_state_height, chain_path_to_trial):
    """Tentatively apply chain_path_to_trial using soft checkpoints only.

    Returns True when every block applies cleanly, False when a
    ValidationError is raised along the way (logged for diagnosis).
    """
    try:
        self.apply_chain_path(chain_path_to_trial, hard_checkpoint=False)
    except ValidationError as err:
        debug(err)
        debug('StateMaker: trial failed, around: %d, proposed head: %064x' % (around_state_height, chain_path_to_trial[-1].get_hash()))
        return False
    return True
def update_coinbase(self, pay_to):
    """Rebuild the future block's coinbase to pay `pay_to` the maximum
    currently-claimable value, replaying block events in the future state.
    """
    self.future_block.remove_coinbase_transaction()
    self.refresh_future_super_txs()
    with self.future_state():
        debug('Statemaker: update_coinbase, height', self.get_height(), self.future_block.height)
        self._block_events(self.future_block)
        # claimable amount = accumulated fees in the known-pubkey account
        max_coinbase_value = self.dapps[ROOT_DAPP].state[TxPrism.KNOWN_PUBKEY_X]
        stx = self._SuperTxClass.create_coinbase(pay_to, max_coinbase_value)
        self.future_block.add_super_tx(stx)
    debug('Statemaker: future block', self.future_block)
def get_hash(self):
    """Return the merkle root over (hash(name), state-hash) pairs of every
    registered dapp state, with names in sorted order."""
    leaves = []
    names = list(self.state_dict.keys())
    # all names are bytes, need to investigate exactly how these are sorted.
    names.sort()
    for n in names:
        leaves.extend([global_hash(n), self.state_dict[n].get_hash()])
    merkle_root = MerkleLeavesToRoot(leaves=leaves)
    debug('SuperState: root: ', merkle_root.get_hash(), [self.state_dict[n].complete_kvs() for n in names])
    return merkle_root.get_hash()
def on_block(self, block, chain):
    """Extend the stored Collatz-style sequence by one entry for this block.

    When the predecessor value is absent/<= 1 the sequence restarts at the
    block height itself; otherwise the usual halving / 3n+1 step applies.
    Genesis (height 0) records nothing.
    """
    h = block.height
    if h > 0:
        prev = self.state[h - 1]
        if prev <= 1:
            self.state[h] = h
        elif prev % 2 == 0:
            self.state[h] = prev // 2
        else:
            self.state[h] = 3 * prev + 1
    debug('Counter: on_block called.', self.state.key_value_store)
def sign(self, secret_exponent, message):
    '''Set (r, s) to the secp256k1 ECDSA signature of global_hash(message)
    and derive pubkey/sender from secret_exponent; self-verifies after.
    '''
    assert isinstance(message, int)
    # TODO check that we're doing this right.
    self.r, self.s = pycoin.ecdsa.sign(pycoin.ecdsa.generator_secp256k1, secret_exponent, global_hash(message))
    self.pubkey_x, self.pubkey_y = pycoin.ecdsa.public_pair_for_secret_exponent(pycoin.ecdsa.generator_secp256k1, secret_exponent)
    self.sender = self.pubkey_x
    # sanity check: the signature we just produced must verify
    self.assert_valid_signature(message)
    debug('Signature.sign', message)
def sign(self, secret_exponent, message):
    '''Set (r, s) to the secp256k1 ECDSA signature of global_hash(message)
    and derive pubkey/sender from secret_exponent; self-verifies after.
    '''
    assert isinstance(message, int)
    # TODO check that we're doing this right.
    self.r, self.s = pycoin.ecdsa.sign(pycoin.ecdsa.generator_secp256k1, secret_exponent, global_hash(message))
    self.pubkey_x, self.pubkey_y = pycoin.ecdsa.public_pair_for_secret_exponent(pycoin.ecdsa.generator_secp256k1, secret_exponent)
    self.sender = self.pubkey_x
    # sanity check: the signature we just produced must verify
    self.assert_valid_signature(message)
    debug('Signature.sign', message)
def update(self):
    """Recompute self.root, the merkle root of self.leaves.

    Leaves are 256-bit ints. Each level pairs adjacent nodes (padding an
    odd-sized level with a zero leaf) and hashes the concatenation of
    their 32-byte big-endian encodings via self.my_hash. Empty tree -> 0.

    Propagates whatever self.my_hash / int.to_bytes raise (e.g.
    OverflowError for a leaf wider than 32 bytes), logging leaves first.
    """
    if len(self.leaves) == 0:
        self.root = 0
        return
    try:
        level = self.leaves[:]
        while len(level) > 1:
            if len(level) % 2 != 0:
                # pad odd levels; 0 == int.from_bytes(b'\x00' * 32, 'big')
                level.append(0)
            level = [
                self.my_hash(level[i].to_bytes(32, 'big') + level[i + 1].to_bytes(32, 'big'))
                for i in range(0, len(level), 2)
            ]
        self.root = level[0]
    except Exception:  # was a bare except (E722); log input, then propagate
        debug('MerkleTree update, leaves :', self.leaves)
        raise
def intro_handler(node, their_intro):
    """p2p handler: record a peer's intro, chase its advertised tip if we
    lack it, and request all heights between our head and theirs."""
    debug('intro_handler')
    if config['network_debug'] or True:  # `or True`: logging forced on
        debug('MSG intro : %064x' % their_intro.get_hash())
    self.intros[node] = their_intro
    debug('intro_handler: the peer: ', node.address)
    # hash 0 means "no top block" — never seek it
    if their_intro.top_block != 0 and not self.chain.has_block_hash(their_intro.top_block):
        debug('intro_handler: their top_block %064x' % their_intro.top_block)
        self.seek_n_build.seek_hash_now(their_intro.top_block)
    #@asyncio.coroutine
    #def repeat_intro():
    #    node.send(b'request_heights', IntList(contents=list(range(self.chain.head.height + 1, their_intro.top_height + 1))))
    #    yield from asyncio.sleep(30)#*60)
    #    self._loop.run_until_complete(repeat_intro())
    #if their_intro.top_height > self.chain.head.height:
    #    self._loop.run_until_complete(repeat_intro())
    node.send(b'request_heights', IntList(contents=list(range(self.chain.head.height + 1, their_intro.top_height + 1))))
def set_handlers(self):
    """Register all p2p message handlers: intro exchange on connect, block
    delivery, and serving block requests."""
    debug('set_handlers')

    @self.p2p.on_connect
    def on_connect_handler(node):
        # greet each new peer with our current head
        debug('on_connect_handler')
        my_intro = Intro(top_block=self.chain.head.get_hash())
        node.send('intro', my_intro)

    @self.p2p.on_message('intro', Intro)
    def intro_handler(node, their_intro):
        debug('intro_handler')
        if config['network_debug'] or True:  # `or True`: logging forced on
            debug('MSG intro : %064x' % their_intro.get_hash())
        self.intros[node] = their_intro
        debug('intro_handler: the peer: ', node.address)
        # NOTE(review): no `top_block != 0` guard here, unlike the fuller
        # intro_handler variant — a zeroed intro would trigger a bogus seek.
        if not self.chain.has_block_hash(their_intro.top_block):
            debug('intro_handler: their top_block %064x' % their_intro.top_block)
            self.seek_n_build.seek_hash_now(their_intro.top_block)

    @self.p2p.on_message('blocks', BytesList)
    def blocks_handler(node, block_list):
        # deserialize each delivered block; reject inconsistent ones, keep going
        if config['network_debug'] or True:
            debug('MSG blocks : %064x' % block_list.get_hash())
        for serialized_block in block_list:
            try:
                potential_block = self._Block(serialized_block)
                potential_block.assert_internal_consistency()
                debug('blocks_handler: accepting block of height %d' % potential_block.height)
            except ValidationError as e:
                debug('blocks_handler: serialized_block:', serialized_block)
                debug('blocks_handler error', e)
                #node.misbehaving()
                continue
            self.seek_n_build.add_block(potential_block)
            self.seek_n_build.seek_many_with_priority(potential_block.related_blocks())

    @self.p2p.on_message('request_blocks', HashList)
    def request_blocks_handler(node, requests):
        # serve every requested block we actually have
        if config['network_debug'] or True:
            debug('MSG request_blocks : %064x' % requests.get_hash())
        blocks_to_send = BytesList()
        for bh in requests:
            if self.chain.has_block_hash(bh):
                blocks_to_send.append(self.chain.get_block(bh).serialize())
        if blocks_to_send.len() > 0:
            node.send('blocks', blocks_to_send)
def _add_block(self, block):
    """Queue a newly received block for the chain builder, skipping any
    block that is already pending or already processed."""
    h = block.get_hash()
    if h in self.past or h in self.done:
        return  # duplicate: already queued or already built
    debug('SNB: Add Block %s' % h)
    self.past.add(h)
    self.past_queue.put_nowait((block.height, self.nonces.get_next(), block))
    self.heights_received.add(block.height)
    self.all.add(h)
    if h in self.present:
        # it is no longer merely "requested" — it has arrived
        self.present.remove(h)
def calc_expected_target(header, previous_block, chain):
    """Return the proof-of-work target expected for `header`.

    Genesis gets the default target; off-retarget heights inherit the
    previous block's target; retarget heights scale the previous target by
    the actual/expected timespan ratio, clamped to [1/4, 4].
    """
    if header.previous_blocks[0] == 0:
        return GrachtenHeader.DEFAULT_TARGET
    if header.height % GrachtenHeader.RETARGET_PERIOD != 0:
        return previous_block.header.target
    old_ancestor = chain.get_block(header.previous_blocks[(GrachtenHeader.RETARGET_PERIOD - 1).bit_length()])
    timedelta = header.timestamp - old_ancestor.header.timestamp
    expected_timedelta = 60 * 60 * 24 * GrachtenHeader.RETARGET_PERIOD // GrachtenHeader.BLOCKS_PER_DAY
    # clamp the adjustment so a single retarget can't swing more than 4x
    timedelta = min(max(timedelta, expected_timedelta // 4), expected_timedelta * 4)
    new_target = previous_block.header.target * timedelta // expected_timedelta
    debug('New Target Calculated: %064x, height: %d' % (new_target, header.height))
    return new_target
def intro_handler(node, their_intro):
    """p2p handler: record a peer's intro and chase its advertised tip if
    we don't already have that block.

    A zeroed top_block (hash 0) means "no tip" and must never be sought —
    the fuller intro_handler variant already guards this; added here too.
    """
    debug('intro_handler')
    if config['network_debug'] or True:  # `or True`: logging forced on
        debug('MSG intro : %064x' % their_intro.get_hash())
    self.intros[node] = their_intro
    debug('intro_handler: the peer: ', node.address)
    if their_intro.top_block != 0 and not self.chain.has_block_hash(their_intro.top_block):
        debug('intro_handler: their top_block %064x' % their_intro.top_block)
        self.seek_n_build.seek_hash_now(their_intro.top_block)
def assert_validity(self, chain):
    ''' self.assert_validity should validate the following:
    * self.header.state_mr equals root of self.super_state

    Also checks internal consistency, header validity, known parent and
    height continuity; genesis blocks instead require zeroed fields.
    Raises via self.assert_true on failure.
    '''
    self.assert_internal_consistency()
    self.header.assert_validity(chain)
    if self.state_maker is None:
        # inherit the head's state maker when this block has none yet
        self._set_state_maker(chain.head.state_maker)
    if chain.initialized:
        debug(self)
        self.assert_true(chain.has_block_hash(self.parent_hash), 'Parent must be known')
        self.assert_true(chain.get_block(self.parent_hash).height + 1 == self.height, 'Height requirement')
        self.assert_true(self.super_state.get_hash() == self.header.state_mr, 'State root must match expected')
    else:
        # chain not initialized: this must be the genesis block
        self.assert_true(self.height == 0, 'Genesis req.: height must be 0')
        self.assert_true(self.parent_hash == 0, 'Genesis req.: parent_hash must be zeroed')
        self.assert_true(self.header.state_mr == 0, 'Genesis req.: state_mr zeroed')
def update(self):
    """Recompute self.root, the merkle root of self.leaves.

    Leaves are 256-bit ints; each level pairs adjacent nodes (zero-padding
    an odd level) and hashes the concatenated 32-byte big-endian encodings
    via self.my_hash. An empty tree has root 0.
    """
    if len(self.leaves) == 0:
        self.root = 0
    else:
        try:
            t = self.leaves[:]
            while len(t) > 1:
                if len(t) % 2 != 0:
                    # pad odd levels with a zero leaf (this expression is 0)
                    t.append(int.from_bytes(b'\x00' * 32, 'big'))
                t = [self.my_hash(t[i].to_bytes(32, 'big') + t[i + 1].to_bytes(32, 'big')) for i in range(0, len(t), 2)]
            self.root = t[0]
        except:
            # log the offending leaves, then re-raise unchanged
            debug('MerkleTree update, leaves :', self.leaves)
            raise
def calc_expected_target(self, chain, previous_block):
    ''' Given self, chain, and previous_block, calculate the expected target.
    Currently using same method as Bitcoin: scale the previous target by
    the actual/expected timespan ratio, clamped to [1/4, 4], capped below
    TARGET1.
    '''
    if self.previous_blocks[0] == 0:
        return Header.DEFAULT_TARGET
    if self.height % Header.RETARGET_PERIOD != 0:
        return previous_block.header.target
    # todo: is this only going to work for retarget periods of a power of 2?
    old_ancestor = chain.get_block(self.previous_blocks[(Header.RETARGET_PERIOD - 1).bit_length()])
    timedelta = self.timestamp - old_ancestor.header.timestamp
    expected_timedelta = 60 * 60 * 24 * Header.RETARGET_PERIOD // Header.BLOCKS_PER_DAY
    # clamp the adjustment so a single retarget can't swing more than 4x
    timedelta = max(timedelta, expected_timedelta // 4)
    timedelta = min(timedelta, expected_timedelta * 4)
    new_target = previous_block.header.target * timedelta // expected_timedelta
    # never exceed the easiest allowed target
    new_target = min(new_target, self._TARGET1 - 1)
    debug('New Target Calculated: %064x, height: %d' % (new_target, self.height))
    return new_target
def mine(self, provided_block=None):
    """Main mining loop: grind nonces on a candidate block (or the given
    provided_block) until a valid, internally consistent solution appears,
    then submit it via seek_n_build.

    Controlled by self._shutdown (stop entirely) and self._restart
    (abandon the current candidate and fetch a fresh one). Genesis blocks
    are only printed, not submitted.
    """
    print(provided_block)
    while not self._shutdown:
        self._restart = False
        if provided_block is None:
            block = self.chain.head.get_candidate(self.chain)
            if hasattr(block, 'update_roots'):
                block.update_roots()
        else:
            block = provided_block
        count = 0
        debug('miner (re)starting', block.serialize(), self._shutdown, self._restart)
        # inner grind loop: bump nonce until proof found or interrupted
        while not self._shutdown and not self._restart:
            count += 1
            block.increment_nonce()
            if block.valid_proof():
                try:
                    block.assert_internal_consistency()
                    break
                except ValidationError as e:
                    # valid PoW but inconsistent block: keep grinding
                    debug('Miner: invalid block generated: %s' % block.serialize())
                    continue
        if self._shutdown:
            break
        provided_block = None  # a provided block is only mined once
        if self._restart:
            self._restart = False
            time.sleep(0.01)
            print('miner -restarting')
            continue
        debug('Miner: Found Soln : %064x' % block.get_hash(), block.serialize())
        if block.height == 0:
            # print genesis
            debug('Miner: ser\'d genesis block: ', block.serialize())
            break  # break and let chain restart miner
        # round-trip through serialization so the submitted block is a clean copy
        self.seek_n_build.add_block(block.__class__.deserialize(block.serialize()))
        debug('Miner: submitted block')
        # idle until the chain asks for a restart (or shutdown)
        while not self._restart and not self._shutdown:
            time.sleep(0.02)
    print('miner: ended loop')
def find_lca(self, block_hash_a, block_hash_b):
    ''' This finds the LCA of two blocks given their hashes.
    Currently walks through each blocks parents in turn until a match is
    found and returns that match.

    Raises ChainError when both walks hit genesis without meeting
    (disjoint chains).
    '''
    mutual_history = set()
    debug('Chain.find_lca', block_hash_a, block_hash_b)
    blocks = [self.get_block(block_hash_a), self.get_block(block_hash_b)]
    while True:
        # both walkers on the same block: trivially the LCA
        if blocks[0].get_hash() == blocks[1].get_hash():
            return blocks[0]
        if blocks[0].parent_hash == 0 and blocks[1].parent_hash == 0:
            raise ChainError('No LCA - different chains.')
        for i in range(len(blocks)):
            block_hash = blocks[i].get_hash()
            if block_hash in mutual_history:
                # then this block is LCA — the other walker visited it already
                return blocks[i]
            mutual_history.add(block_hash)
            # step to the parent unless this walker already reached genesis
            if blocks[i].parent_hash != 0:
                blocks[i] = self.get_block(blocks[i].parent_hash)
def calc_expected_target(header, previous_block, chain):
    ''' given a header and previous_block, calculate the expected target:
    default for genesis, inherited off retarget heights, otherwise the
    previous target scaled by actual/expected timespan clamped to [1/4, 4].
    '''
    if header.previous_blocks[0] == 0:
        return GrachtenHeader.DEFAULT_TARGET
    if header.height % GrachtenHeader.RETARGET_PERIOD != 0:
        return previous_block.header.target
    old_ancestor = chain.get_block(header.previous_blocks[(GrachtenHeader.RETARGET_PERIOD - 1).bit_length()])
    timedelta = header.timestamp - old_ancestor.header.timestamp
    expected_timedelta = 60 * 60 * 24 * GrachtenHeader.RETARGET_PERIOD // GrachtenHeader.BLOCKS_PER_DAY
    # clamp the adjustment so a single retarget can't swing more than 4x
    if timedelta < expected_timedelta // 4:
        timedelta = expected_timedelta // 4
    if timedelta > expected_timedelta * 4:
        timedelta = expected_timedelta * 4
    new_target = previous_block.header.target * timedelta // expected_timedelta
    debug('New Target Calculated: %064x, height: %d' % (new_target, header.height))
    return new_target
def on_transaction(self, tx, block, chain):
    '''
    Process a transaction.
    tx has following info (subject to change):
    tx.value, tx.fee, tx.data, tx.sender, tx.dapp

    tx.value >= 0 not >0 so 0 value txs (like notification of new foreign
    blocks) can be free.

    Debits value+fee+donation from the sender, credits the fee and donation
    accounts, then routes the value either to a raw recipient (root dapp)
    or to a registered dapp's balance and handler.
    '''
    self.assert_true(tx.value >= 0, 'tx.value must be greater than or equal to 0')
    self.assert_true(tx.fee >= 0, 'tx.fee must be greater than or equal to 0')
    self.assert_true(tx.donation >= 0, 'tx.donation must be greater than or equal to 0')
    debug('TxPrism.on_transaction', tx.sender, tx.value)
    self.assert_true(self.state[tx.sender] >= tx.value + tx.fee + tx.donation, 'sender must have enough funds')
    self.state[tx.sender] -= tx.value + tx.fee + tx.donation
    self.state[TxPrism.KNOWN_PUBKEY_X] += tx.fee
    self.state[TxPrism.EUDEMONIA_PUBKEY_X] += tx.donation
    if tx.dapp == b'':
        # root dapp: tx.data holds exactly one raw recipient pubkey_x
        self.assert_true(len(tx.data) == 1, 'Only one recipient allowed when sending to root dapp')
        # message corrected: the enforced bound is 33 bytes, not 32
        self.assert_true(len(tx.data[0]) <= 33, 'recipient must be <= 33 bytes long (should be pubkey_x)')
        recipient = tx.data[0]
        self.state[recipient] += tx.value
    else:
        self.assert_true(tx.dapp in self.state_maker.dapps, 'dapp must be known')
        self.state[tx.dapp] += tx.value
        self.state_maker.dapps[tx.dapp].on_transaction(tx, block, chain)
    self.state.recursively_print_state()
    debug('TxPrism.on_tx: super_state hash', self.super_state.get_hash())
def create_match_and_change(bid, ask):
    """Match a bid against an ask at the midpoint rate.

    Returns (match, change): an OrderMatch for the traded volume plus a
    change order for whichever side was only partially filled (None when
    the fill is exact). All arithmetic is integer, scaled by RATE_CONSTANT.
    """
    # todo: figure out how to do rates - duh, just work in large numbers - make sure to test scaling works okay
    # todo: figure out limits of accuracy to warn users
    trade_rate = (bid.rate + ask.rate) // 2
    debug('create_match_and_change', bid.rate, ask.rate, trade_rate, Market.Order.RATE_CONSTANT)
    bid_xmk = bid.amount * Market.Order.RATE_CONSTANT // trade_rate
    ask_xmk = ask.amount
    debug('create_match_and_change, min bid, max ask: ', bid_xmk, ask_xmk)
    if ask_xmk == bid_xmk:
        # exact fill: no change order, full pledge carried over
        trade_volume = bid_xmk
        change = None
        alt_pledge_amount = bid.pledge
    else:
        trade_volume = min(bid_xmk, ask_xmk)
        # NOTE(review): when trade_volume < bid_xmk this gives
        # alt_pledge_amount > bid.pledge (and a negative change pledge
        # below) — the ratio looks inverted; expected
        # bid.pledge * trade_volume // bid_xmk. Verify against Market spec.
        alt_pledge_amount = bid.pledge * bid_xmk // trade_volume
        if trade_volume == bid_xmk:
            # IE there is alt change
            change = Market.Order.make_change(ask, amount=ask_xmk - bid_xmk)
        elif trade_volume == ask_xmk:
            # IE there is xmk change
            alt_change_amount = Market.Order.calculate_rate(bid.rate, xmk=bid_xmk - trade_volume)
            change = Market.Order.make_change(bid, amount=alt_change_amount, pledge=bid.pledge - alt_pledge_amount)
        else:
            raise ValidationError('Wtf, how\'d you get here?!')
    foreign_amount = Market.Order.calculate_rate(trade_rate, xmk=trade_volume)
    match = Market.OrderMatch(pay_to_pubkey_hash=ask.pay_to_pubkey_hash, success_output=bid.sender, fail_output=ask.sender, foreign_amount=foreign_amount, local_amount=trade_volume, pledge_amount=alt_pledge_amount, rate=trade_rate)
    return (match, change)
def calc_expected_target(self, chain, previous_block):
    ''' Given self, chain, and previous_block, calculate the expected target.
    Currently using same method as Bitcoin: previous target scaled by the
    actual/expected timespan ratio, clamped to [1/4, 4], capped below
    TARGET1.
    '''
    if self.previous_blocks[0] == 0:
        return Header.DEFAULT_TARGET
    if self.height % Header.RETARGET_PERIOD != 0:
        return previous_block.header.target
    # todo: is this only going to work for retarget periods of a power of 2?
    old_ancestor = chain.get_block(self.previous_blocks[(Header.RETARGET_PERIOD - 1).bit_length()])
    timedelta = self.timestamp - old_ancestor.header.timestamp
    expected_timedelta = 60 * 60 * 24 * Header.RETARGET_PERIOD // Header.BLOCKS_PER_DAY
    # clamp the adjustment so a single retarget can't swing more than 4x
    timedelta = max(timedelta, expected_timedelta // 4)
    timedelta = min(timedelta, expected_timedelta * 4)
    new_target = previous_block.header.target * timedelta // expected_timedelta
    # never exceed the easiest allowed target
    new_target = min(new_target, self._TARGET1 - 1)
    debug('New Target Calculated: %064x, height: %d' % (new_target, self.height))
    return new_target
def get_hash(self):
    """Return the merkle root over (hash(name), state-hash) pairs of every
    registered dapp state, with names in sorted order."""
    leaves = []
    names = list(self.state_dict.keys())
    # all names are bytes, need to investigate exactly how these are sorted.
    names.sort()
    debug('SuperState: names', names)
    for n in names:
        leaves.extend([global_hash(n), self.state_dict[n].get_hash()])  # h(name), h(state) pairs
    debug('SuperState: leaves', leaves)
    merkle_root = MerkleLeavesToRoot(leaves=leaves)
    debug('SuperState: MR', merkle_root.get_hash())
    return merkle_root.get_hash()