def scrypt(data):
    return pack.IntType(256).unpack(
        __import__('scryptSquared').getPoWHash(data))
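# The POW_FUNC wrappers above (scrypt here, lyra2re2/yescrypt/dash_hash in the
# network files below) all return the proof-of-work hash as a 256-bit integer
# so it can be compared numerically against a target.  A minimal, self-contained
# sketch of that idea, using double-SHA256 as a stand-in for the scryptSquared
# hash and assuming IntType(256)'s little-endian byte order:
import binascii
import hashlib

def example_pow_int(header_bytes):
    digest = hashlib.sha256(hashlib.sha256(header_bytes).digest()).digest()
    return int(binascii.hexlify(digest[::-1]), 16)  # little-endian bytes -> integer

def example_meets_target(header_bytes, target):
    # a share/block is valid when hash <= target (a smaller hash means more work)
    return example_pow_int(header_bytes) <= target

assert example_meets_target(b'\x00' * 80, 2**256 - 1)  # any hash beats the maximum target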
class BaseShare(object): VERSION = 0 VOTING_VERSION = 0 SUCCESSOR = None MAX_BLOCK_WEIGHT = 4000000 MAX_NEW_TXS_SIZE = 50000 small_block_header_type = pack.ComposedType([ ('version', pack.VarIntType()), ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))), ('timestamp', pack.IntType(32)), ('bits', bitcoin_data.FloatingIntegerType()), ('nonce', pack.IntType(32)), ]) share_info_type = None share_type = None ref_type = None gentx_before_refhash = pack.VarStrType().pack( DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack( '\x6a\x28' + pack.IntType(256).pack(0) + pack.IntType(64).pack(0))[:3] @classmethod def get_dynamic_types(cls, net): t = dict(share_info_type=None, share_type=None, ref_type=None) segwit_data = ( 'segwit_data', pack.PossiblyNoneType( dict(txid_merkle_link=dict(branch=[], index=0), wtxid_merkle_root=2**256 - 1), pack.ComposedType([ ( 'txid_merkle_link', pack.ComposedType([ ('branch', pack.ListType(pack.IntType(256))), ('index', pack.IntType(0)), # it will always be 0 ])), ('wtxid_merkle_root', pack.IntType(256)) ]))) t['share_info_type'] = pack.ComposedType( [('share_data', pack.ComposedType([ ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))), ('coinbase', pack.VarStrType()), ('nonce', pack.IntType(32)), ('pubkey_hash', pack.IntType(160)), ('pubkey_hash_version', pack.IntType(8)), ('subsidy', pack.IntType(64)), ('donation', pack.IntType(16)), ('stale_info', pack.StaleInfoEnumType()), ('desired_version', pack.VarIntType()), ]))] + ([segwit_data] if is_segwit_activated(cls.VERSION, net) else []) + [ ('new_transaction_hashes', pack.ListType(pack.IntType(256))), ('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2) ), # pairs of share_count, tx_count ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))), ('max_bits', bitcoin_data.FloatingIntegerType()), ('bits', bitcoin_data.FloatingIntegerType()), ('timestamp', pack.IntType(32)), ('absheight', pack.IntType(32)), ('abswork', pack.IntType(128)), ]) t['share_type'] = pack.ComposedType([ ('min_header', cls.small_block_header_type), ('share_info', t['share_info_type']), ('ref_merkle_link', pack.ComposedType([ ('branch', pack.ListType(pack.IntType(256))), ('index', pack.IntType(0)), ])), ('last_txout_nonce', pack.IntType(64)), ('hash_link', hash_link_type), ( 'merkle_link', pack.ComposedType([ ('branch', pack.ListType(pack.IntType(256))), ('index', pack.IntType(0)), # it will always be 0 ])), ]) t['ref_type'] = pack.ComposedType([ ('identifier', pack.FixedStrType(64 // 8)), ('share_info', t['share_info_type']), ]) return t @classmethod def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None, segwit_data=None): previous_share = tracker.items[ share_data['previous_share_hash']] if share_data[ 'previous_share_hash'] is not None else None height, last = tracker.get_height_and_last( share_data['previous_share_hash']) assert height >= net.REAL_CHAIN_LENGTH or last is None if height < net.TARGET_LOOKBEHIND: pre_target3 = net.MAX_TARGET else: attempts_per_second = get_pool_attempts_per_second( tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True) pre_target = 2**256 // ( net.SHARE_PERIOD * attempts_per_second) - 1 if attempts_per_second else 2**256 - 1 pre_target2 = math.clip(pre_target, (previous_share.max_target * 9 // 10, previous_share.max_target * 11 // 10)) pre_target3 = 
math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET)) max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound( pre_target3) bits = bitcoin_data.FloatingInteger.from_target_upper_bound( math.clip(desired_target, (pre_target3 // 30, pre_target3))) new_transaction_hashes = [] new_transaction_size = 0 transaction_hash_refs = [] other_transaction_hashes = [] past_shares = list( tracker.get_chain(share_data['previous_share_hash'], min(height, 100))) tx_hash_to_this = {} for i, share in enumerate(past_shares): for j, tx_hash in enumerate(share.new_transaction_hashes): if tx_hash not in tx_hash_to_this: tx_hash_to_this[tx_hash] = [1 + i, j] # share_count, tx_count for tx_hash, fee in desired_other_transaction_hashes_and_fees: if tx_hash in tx_hash_to_this: this = tx_hash_to_this[tx_hash] else: if known_txs is not None: this_size = bitcoin_data.tx_type.packed_size( known_txs[tx_hash]) if new_transaction_size + this_size > cls.MAX_NEW_TXS_SIZE: # limit the size of new txns/share break new_transaction_size += this_size new_transaction_hashes.append(tx_hash) this = [0, len(new_transaction_hashes) - 1] transaction_hash_refs.extend(this) other_transaction_hashes.append(tx_hash) included_transactions = set(other_transaction_hashes) removed_fees = [ fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions ] definite_fees = sum( 0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions) if None not in removed_fees: share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees)) else: assert base_subsidy is not None share_data = dict(share_data, subsidy=base_subsidy + definite_fees) weights, total_weight, donation_weight = tracker.get_cumulative_weights( previous_share.share_data['previous_share_hash'] if previous_share is not None else None, max(0, min(height, net.REAL_CHAIN_LENGTH) - 1), 65535 * net.SPREAD * bitcoin_data.target_to_average_attempts(block_target), ) assert total_weight == sum(weights.itervalues()) + donation_weight, ( total_weight, sum(weights.itervalues()) + donation_weight) amounts = dict( (script, share_data['subsidy'] * (199 * weight) // (200 * total_weight)) for script, weight in weights.iteritems( )) # 99.5% goes according to weights prior to this share this_script = bitcoin_data.pubkey_hash_to_script2( share_data['pubkey_hash'], share_data['pubkey_hash_version'], net.PARENT) amounts[this_script] = amounts.get( this_script, 0) + share_data['subsidy'] // 200 # 0.5% goes to block finder amounts[DONATION_SCRIPT] = amounts.get( DONATION_SCRIPT, 0 ) + share_data['subsidy'] - sum( amounts.itervalues() ) # all that's left over is the donation weight and some extra satoshis due to rounding if sum(amounts.itervalues()) != share_data['subsidy'] or any( x < 0 for x in amounts.itervalues()): raise ValueError() dests = sorted( amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[ -4000:] # block length limit, unlikely to ever be hit segwit_activated = is_segwit_activated(cls.VERSION, net) if segwit_data is None and known_txs is None: segwit_activated = False if not (segwit_activated or known_txs is None) and any( bitcoin_data.is_segwit_tx(known_txs[h]) for h in other_transaction_hashes): raise ValueError('segwit transaction included before activation') if segwit_activated and known_txs is not None: share_txs = [(known_txs[h], bitcoin_data.get_txid(known_txs[h]), h) for h in other_transaction_hashes] segwit_data = 
dict( txid_merkle_link=bitcoin_data.calculate_merkle_link( [None] + [tx[1] for tx in share_txs], 0), wtxid_merkle_root=bitcoin_data.merkle_hash([0] + [ bitcoin_data.get_wtxid(tx[0], tx[1], tx[2]) for tx in share_txs ])) if segwit_activated and segwit_data is not None: witness_reserved_value_str = '[P2Pool]' * 4 witness_reserved_value = pack.IntType(256).unpack( witness_reserved_value_str) witness_commitment_hash = bitcoin_data.get_witness_commitment_hash( segwit_data['wtxid_merkle_root'], witness_reserved_value) share_info = dict( share_data=share_data, far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99), max_bits=max_bits, bits=bits, timestamp=math.clip( desired_timestamp, ( (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1 (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1), )) if previous_share is not None else desired_timestamp, new_transaction_hashes=new_transaction_hashes, transaction_hash_refs=transaction_hash_refs, absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32, abswork=( (previous_share.abswork if previous_share is not None else 0) + bitcoin_data.target_to_average_attempts(bits.target)) % 2**128, ) if segwit_activated: share_info['segwit_data'] = segwit_data gentx = dict( version=1, tx_ins=[ dict( previous_output=None, sequence=None, script=share_data['coinbase'], ) ], tx_outs=([ dict(value=0, script='\x6a\x24\xaa\x21\xa9\xed' + pack.IntType(256).pack(witness_commitment_hash)) ] if segwit_activated else []) + [ dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT ] + [ dict(value=0, script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce)) ], lock_time=0, ) if segwit_activated: gentx['marker'] = 0 gentx['flag'] = 1 gentx['witness'] = [[witness_reserved_value_str]] def get_share(header, last_txout_nonce=last_txout_nonce): min_header = dict(header) del min_header['merkle_root'] share = cls( net, None, dict( min_header=min_header, share_info=share_info, ref_merkle_link=dict(branch=[], index=0), last_txout_nonce=last_txout_nonce, hash_link=prefix_to_hash_link( bitcoin_data.tx_id_type.pack(gentx)[:-32 - 8 - 4], cls.gentx_before_refhash), merkle_link=bitcoin_data.calculate_merkle_link( [None] + other_transaction_hashes, 0), )) assert share.header == header # checks merkle_root return share return share_info, gentx, other_transaction_hashes, get_share @classmethod def get_ref_hash(cls, net, share_info, ref_merkle_link): return pack.IntType(256).pack( bitcoin_data.check_merkle_link( bitcoin_data.hash256( cls.get_dynamic_types(net)['ref_type'].pack( dict( identifier=net.IDENTIFIER, share_info=share_info, ))), ref_merkle_link)) __slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen absheight abswork'.split( ' ') def __init__(self, net, peer_addr, contents): dynamic_types = self.get_dynamic_types(net) self.share_info_type = dynamic_types['share_info_type'] self.share_type = dynamic_types['share_type'] self.ref_type = dynamic_types['ref_type'] self.net = net self.peer_addr = peer_addr self.contents = contents self.min_header = contents['min_header'] self.share_info = contents['share_info'] self.hash_link = 
contents['hash_link'] self.merkle_link = contents['merkle_link'] segwit_activated = is_segwit_activated(self.VERSION, net) if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100): raise ValueError( '''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']), )) if len(self.merkle_link['branch']) > 16 or (segwit_activated and len( self.share_info['segwit_data']['txid_merkle_link']['branch']) > 16): raise ValueError('merkle branch too long!') assert not self.hash_link['extra_data'], repr( self.hash_link['extra_data']) self.share_data = self.share_info['share_data'] self.max_target = self.share_info['max_bits'].target self.target = self.share_info['bits'].target self.timestamp = self.share_info['timestamp'] self.previous_hash = self.share_data['previous_share_hash'] self.new_script = bitcoin_data.pubkey_hash_to_script2( self.share_data['pubkey_hash'], self.share_data['pubkey_hash_version'], net.PARENT) self.desired_version = self.share_data['desired_version'] self.absheight = self.share_info['absheight'] self.abswork = self.share_info['abswork'] n = set() for share_count, tx_count in self.iter_transaction_hash_refs(): assert share_count < 110 if share_count == 0: n.add(tx_count) assert n == set(range(len(self.share_info['new_transaction_hashes']))) self.gentx_hash = check_hash_link( self.hash_link, self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0), self.gentx_before_refhash, ) merkle_root = bitcoin_data.check_merkle_link( self.gentx_hash, self.share_info['segwit_data']['txid_merkle_link'] if segwit_activated else self.merkle_link) self.header = dict(self.min_header, merkle_root=merkle_root) self.pow_hash = net.PARENT.POW_FUNC( bitcoin_data.block_header_type.pack(self.header)) self.hash = self.header_hash = bitcoin_data.hash256( bitcoin_data.block_header_type.pack(self.header)) if self.target > net.MAX_TARGET: from p2pool import p2p raise p2p.PeerMisbehavingError('share target invalid') if self.pow_hash > self.target: from p2pool import p2p raise p2p.PeerMisbehavingError('share PoW invalid') self.new_transaction_hashes = self.share_info['new_transaction_hashes'] # XXX eww self.time_seen = time.time() def __repr__(self): return 'Share' + repr((self.net, self.peer_addr, self.contents)) def as_share(self): return dict(type=self.VERSION, contents=self.share_type.pack(self.contents)) def iter_transaction_hash_refs(self): return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2]) def check(self, tracker, other_txs=None): from p2pool import p2p counts = None if self.share_data['previous_share_hash'] is not None: previous_share = tracker.items[ self.share_data['previous_share_hash']] if tracker.get_height(self.share_data['previous_share_hash'] ) >= self.net.CHAIN_LENGTH: counts = get_desired_version_counts( tracker, tracker.get_nth_parent_hash( previous_share.hash, self.net.CHAIN_LENGTH * 9 // 10), self.net.CHAIN_LENGTH // 10) if type(self) is type(previous_share): pass elif type(self) is type(previous_share).SUCCESSOR: # switch only valid if 60% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version if counts.get(self.VERSION, 0) < sum(counts.itervalues()) * 60 // 100: raise p2p.PeerMisbehavingError( 'switch without enough hash power upgraded') else: raise p2p.PeerMisbehavingError( '''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__)) elif type(self) is 
type(previous_share).SUCCESSOR: raise p2p.PeerMisbehavingError('switch without enough history') other_tx_hashes = [ tracker.items[tracker.get_nth_parent_hash( self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs() ] if other_txs is not None and not isinstance(other_txs, dict): other_txs = dict( (bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)), tx) for tx in other_txs) share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction( tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net, known_txs=other_txs, last_txout_nonce=self.contents['last_txout_nonce'], segwit_data=self.share_info.get('segwit_data', None)) assert other_tx_hashes2 == other_tx_hashes if share_info != self.share_info: raise ValueError('share_info invalid') if bitcoin_data.get_txid(gentx) != self.gentx_hash: raise ValueError('''gentx doesn't match hash_link''') if bitcoin_data.calculate_merkle_link( [None] + other_tx_hashes, 0 ) != self.merkle_link: # the other hash commitments are checked in the share_info assertion raise ValueError('merkle_link and other_tx_hashes do not match') update_min_protocol_version(counts, self) return gentx # only used by as_block def get_other_tx_hashes(self, tracker): parents_needed = max( share_count for share_count, tx_count in self.iter_transaction_hash_refs( )) if self.share_info['transaction_hash_refs'] else 0 parents = tracker.get_height(self.hash) - 1 if parents < parents_needed: return None last_shares = list(tracker.get_chain(self.hash, parents_needed + 1)) return [ last_shares[share_count].share_info['new_transaction_hashes'] [tx_count] for share_count, tx_count in self.iter_transaction_hash_refs() ] def _get_other_txs(self, tracker, known_txs): other_tx_hashes = self.get_other_tx_hashes(tracker) if other_tx_hashes is None: return None # not all parents present if not all(tx_hash in known_txs for tx_hash in other_tx_hashes): return None # not all txs present return [known_txs[tx_hash] for tx_hash in other_tx_hashes] def should_punish_reason(self, previous_block, bits, tracker, known_txs): if (self.header['previous_block'], self.header['bits']) != ( previous_block, bits ) and self.header_hash != previous_block and self.peer_addr is not None: return True, 'Block-stale detected! height(%x) < height(%x) or %08x != %08x' % ( self.header['previous_block'], previous_block, self.header['bits'].bits, bits.bits) if self.pow_hash <= self.header['bits'].target: return -1, 'block solution' other_txs = self._get_other_txs(tracker, known_txs) if other_txs is None: pass else: all_txs_size = sum( bitcoin_data.tx_type.packed_size(tx) for tx in other_txs) stripped_txs_size = sum( bitcoin_data.tx_id_type.packed_size(tx) for tx in other_txs) if all_txs_size + 3 * stripped_txs_size > self.MAX_BLOCK_WEIGHT: return True, 'txs over block size limit' new_txs_size = sum( bitcoin_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes']) if new_txs_size > self.MAX_NEW_TXS_SIZE: return True, 'new txs over limit' return False, None def as_block(self, tracker, known_txs): other_txs = self._get_other_txs(tracker, known_txs) if other_txs is None: return None # not all txs present return dict(header=self.header, txs=[self.check(tracker, other_txs)] + other_txs)
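# transaction_hash_refs in the share_info above is a flat list of alternating
# (share_count, tx_count) values: share_count == 0 points into this share's own
# new_transaction_hashes, share_count == k (k > 0) points into the list of the
# k-th parent share.  iter_transaction_hash_refs() recovers the pairs with a
# zip-over-slices trick.  A self-contained sketch with made-up values only:
flat_refs = [0, 0, 2, 5, 0, 1]  # three references packed as pairs

def iter_refs(refs):
    # same pairing trick as iter_transaction_hash_refs()
    return zip(refs[::2], refs[1::2])

assert list(iter_refs(flat_refs)) == [(0, 0), (2, 5), (0, 1)]
# (0, 0) and (0, 1) -> first and second entries of this share's new_transaction_hashes;
# (2, 5) -> sixth tx hash introduced by the share two links back in the chain.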
def sha3(data):
    return pack.IntType(256).unpack(hashlib.sha3_256(data).digest())
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack


P2P_PREFIX = 'fbc0b6db'.decode('hex')  # same as litecoin
P2P_PORT = 9401
ADDRESS_VERSION = 50  # M
RPC_PORT = 9402
RPC_CHECK = lambda bitcoind: True
SUBSIDY_FUNC = lambda height: 50 * 100000000 >> (height + 1) // 1051200
POW_FUNC = lambda data: pack.IntType(256).unpack(
    __import__('lyra2re2_hash').getPoWHash(data))
BLOCK_PERIOD = 90  # s
SYMBOL = 'MONA'
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'Monacoin')
    if platform.system() == 'Windows'
    else os.path.expanduser('~/Library/Application Support/Monacoin/')
    if platform.system() == 'Darwin'
    else os.path.expanduser('~/.monacoin'), 'monacoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'https://bchain.info/MONA/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://bchain.info/MONA/addr/'
TX_EXPLORER_URL_PREFIX = 'https://bchain.info/MONA/tx/'
SANE_TARGET_RANGE = (2**256 // 1000000000000000000 - 1, 2**256 // 100000 - 1)
DUMB_SCRYPT_DIFF = 256
DUST_THRESHOLD = 0.03e8
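# SUBSIDY_FUNC above right-shifts the 50 MONA base reward once per 1,051,200
# blocks (the shift amount is (height + 1) // 1051200).  A small worked check
# of that arithmetic, in base units, evaluating the same lambda:
subsidy = lambda height: 50 * 100000000 >> (height + 1) // 1051200

assert subsidy(0) == 50 * 100000000                            # full reward
assert subsidy(1051198) == 50 * 100000000                      # last block before the shift
assert subsidy(1051199) == 25 * 100000000                      # (height + 1) hits 1051200 -> one halving
assert subsidy(2 * 1051200 - 1) == 12 * 100000000 + 50000000   # two halvings: 12.5 MONA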
    def __repr__(self):
        return 'FloatingInteger(bits=%s, target=%s)' % (hex(self.bits), hex(self.target))

class FloatingIntegerType(pack.Type):
    _inner = pack.IntType(32)

    def read(self, file):
        bits, file = self._inner.read(file)
        return FloatingInteger(bits), file

    def write(self, file, item):
        return self._inner.write(file, item.bits)

address_type = pack.ComposedType([
    ('services', pack.IntType(64)),
    ('address', pack.IPV6AddressType()),
    ('port', pack.IntType(16, 'big')),
])

tx_type = pack.ComposedType([
    ('version', pack.IntType(32)),
    ('tx_ins', pack.ListType(pack.ComposedType([
        ('previous_output', pack.PossiblyNoneType(dict(hash=0, index=2**32 - 1), pack.ComposedType([
            ('hash', pack.IntType(256)),
            ('index', pack.IntType(32)),
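# FloatingInteger/FloatingIntegerType above wrap the compact "nBits" encoding:
# the high byte is a base-256 exponent and the low three bytes are the mantissa.
# A standalone sketch of the decode step (assumed to match what the
# FloatingInteger.target property computes):
def compact_bits_to_target(bits):
    return (bits & 0x00ffffff) * 2 ** (8 * ((bits >> 24) - 3))

# the well-known difficulty-1 bits value decodes to 0xffff shifted up 208 bits:
assert compact_bits_to_target(0x1d00ffff) == 0xffff * 2 ** 208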
def hash160(data):
    if data == '04ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664b'.decode('hex'):
        # hack for people who don't have openssl - this is the only value that p2pool ever hashes
        return 0x384f570ccc88ac2e7e00b026d1690a3fca63dd0
    return pack.IntType(160).unpack(
        hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest())
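# hash160 above is RIPEMD-160 over SHA-256; the hard-coded constant is only a
# fallback for builds whose OpenSSL lacks ripemd160.  A self-contained sketch of
# the normal path (requires an OpenSSL build that still exposes ripemd160):
import hashlib

def example_hash160(data):
    return hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest()

assert len(example_hash160(b'')) == 20  # always a 160-bit (20-byte) digest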
class Protocol(p2protocol.Protocol): def __init__(self, net): p2protocol.Protocol.__init__(self, net.P2P_PREFIX, 1000000, ignore_trailing_payload=True) def connectionMade(self): self.send_version( version=70004, services=1, time=int(time.time()), addr_to=dict( services=1, address=self.transport.getPeer().host, port=self.transport.getPeer().port, ), addr_from=dict( services=1, address=self.transport.getHost().host, port=self.transport.getHost().port, ), nonce=random.randrange(2**64), sub_version_num='/P2Pool:%s/' % (p2pool.__version__,), start_height=0, ) message_version = pack.ComposedType([ ('version', pack.IntType(32)), ('services', pack.IntType(64)), ('time', pack.IntType(64)), ('addr_to', bitcoin_data.address_type), ('addr_from', bitcoin_data.address_type), ('nonce', pack.IntType(64)), ('sub_version_num', pack.VarStrType()), ('start_height', pack.IntType(32)), ]) def handle_version(self, version, services, time, addr_to, addr_from, nonce, sub_version_num, start_height): self.send_verack() message_verack = pack.ComposedType([]) def handle_verack(self): self.get_block = deferral.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='block', hash=hash)])) self.get_block_header = deferral.ReplyMatcher(lambda hash: self.send_getheaders(version=1, have=[], last=hash)) if hasattr(self.factory, 'resetDelay'): self.factory.resetDelay() if hasattr(self.factory, 'gotConnection'): self.factory.gotConnection(self) self.pinger = deferral.RobustLoopingCall(self.send_ping, nonce=1234) self.pinger.start(30) message_inv = pack.ComposedType([ ('invs', pack.ListType(pack.ComposedType([ ('type', pack.EnumType(pack.IntType(32), {1: 'tx', 2: 'block'})), ('hash', pack.IntType(256)), ]))), ]) def handle_inv(self, invs): for inv in invs: if inv['type'] == 'tx': self.send_getdata(requests=[inv]) elif inv['type'] == 'block': self.factory.new_block.happened(inv['hash']) else: print 'Unknown inv type', inv message_getdata = pack.ComposedType([ ('requests', pack.ListType(pack.ComposedType([ ('type', pack.EnumType(pack.IntType(32), {1: 'tx', 2: 'block'})), ('hash', pack.IntType(256)), ]))), ]) message_getblocks = pack.ComposedType([ ('version', pack.IntType(32)), ('have', pack.ListType(pack.IntType(256))), ('last', pack.PossiblyNoneType(0, pack.IntType(256))), ]) message_getheaders = pack.ComposedType([ ('version', pack.IntType(32)), ('have', pack.ListType(pack.IntType(256))), ('last', pack.PossiblyNoneType(0, pack.IntType(256))), ]) message_getaddr = pack.ComposedType([]) message_addr = pack.ComposedType([ ('addrs', pack.ListType(pack.ComposedType([ ('timestamp', pack.IntType(32)), ('address', bitcoin_data.address_type), ]))), ]) def handle_addr(self, addrs): for addr in addrs: pass message_tx = pack.ComposedType([ ('tx', bitcoin_data.tx_type), ]) def handle_tx(self, tx): self.factory.new_tx.happened(tx) message_block = pack.ComposedType([ ('block', bitcoin_data.block_type), ]) def handle_block(self, block): block_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])) self.get_block.got_response(block_hash, block) self.get_block_header.got_response(block_hash, block['header']) message_headers = pack.ComposedType([ ('headers', pack.ListType(bitcoin_data.block_type)), ]) def handle_headers(self, headers): for header in headers: header = header['header'] self.get_block_header.got_response(bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)), header) self.factory.new_headers.happened([header['header'] for header in headers]) message_ping = pack.ComposedType([ ('nonce', 
pack.IntType(64)), ]) def handle_ping(self, nonce): self.send_pong(nonce=nonce) message_pong = pack.ComposedType([ ('nonce', pack.IntType(64)), ]) def handle_pong(self, nonce): pass message_alert = pack.ComposedType([ ('message', pack.VarStrType()), ('signature', pack.VarStrType()), ]) def handle_alert(self, message, signature): pass # print 'ALERT:', (message, signature) def connectionLost(self, reason): if hasattr(self.factory, 'gotConnection'): self.factory.gotConnection(None) if hasattr(self, 'pinger'): self.pinger.stop() if p2pool.DEBUG: print >>sys.stderr, 'Bitcoin connection lost. Reason:', reason.getErrorMessage()
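# The Protocol class above exchanges bitcoind-style messages.  The framing it
# rides on (assumed layout, not copied from p2protocol itself) is: network
# magic, a null-padded 12-byte command, a 4-byte little-endian payload length,
# a 4-byte checksum (first four bytes of double-SHA256 of the payload), then
# the payload.  A hedged, self-contained sketch of that envelope:
import hashlib
import struct

def frame_message(magic, command, payload):
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    return magic + struct.pack('<12sI', command, len(payload)) + checksum + payload

# e.g. an empty 'verack' using the Monacoin P2P_PREFIX defined earlier:
packet = frame_message(b'\xfb\xc0\xb6\xdb', b'verack', b'')
assert len(packet) == 4 + 12 + 4 + 4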
                    address=dict(
                        services=self.other_services,
                        address=host,
                        port=port,
                    ),
                    timestamp=int(time.time()),
                ),
            ])
        else:
            if p2pool.DEBUG:
                print 'Advertising for incoming connections'
            # Ask peer to advertise what it believes our IP address to be
            self.send_addrme(port=port)

    message_version = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('services', pack.IntType(64)),
        ('addr_to', bitcoin_data.address_type),
        ('addr_from', bitcoin_data.address_type),
        ('nonce', pack.IntType(64)),
        ('sub_version', pack.VarStrType()),
        ('mode', pack.IntType(32)),  # always 1 for legacy compatibility
        ('best_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])

    def handle_version(self, version, services, addr_to, addr_from, nonce, sub_version, mode, best_share_hash):
        if self.other_version is not None:
            raise PeerMisbehavingError('more than one version message')
        if version < getattr(self.node.net, 'MINIMUM_PROTOCOL_VERSION', 1400):
            raise PeerMisbehavingError('peer too old')
def test_address_to_pubkey_hash(self):
    assert data.address_to_pubkey_hash(
        '1KUCp7YP5FP8ViRxhfszSUJCTAajK6viGy', networks.nets['starwels']
    ) == pack.IntType(160).unpack('ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.decode('hex'))
def grshash256(data):
    return pack.IntType(256).unpack(grshash(data))
def hash160(data):
    # PtoPXC8MmB5VwfHFNhVTynATLSHLzq5MgF
    if data == '0494803BA564D117067A62408A1D85BCE6B559BE5CC30264CFD266B3196E045DAD09D2CDCBA09A752B6A78B74EC068BDB78C78BD13752639961FC839E9446D3AE8'.decode('hex'):
        # hack for people who don't have openssl - this is the only value that p2pool ever hashes
        return 0xae607a73abab3fbe742e1523b3fd3dca38d9f4e4
    return pack.IntType(160).unpack(
        hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest())
class Protocol(p2protocol.Protocol): def __init__(self, net): p2protocol.Protocol.__init__(self, net.P2P_PREFIX, 3145728, ignore_trailing_payload=True) self.net = net def connectionMade(self): self.send_version( version=70208, services=1, time=int(time.time()), addr_to=dict( services=1, address=self.transport.getPeer().host, port=self.transport.getPeer().port, ), addr_from=dict( services=1, address=self.transport.getHost().host, port=self.transport.getHost().port, ), nonce=random.randrange(2**64), sub_version_num='/P2Pool:%s/' % (p2pool.__version__, ), start_height=0, ) message_version = pack.ComposedType([ ('version', pack.IntType(32)), ('services', pack.IntType(64)), ('time', pack.IntType(64)), ('addr_to', axe_data.address_type), ('addr_from', axe_data.address_type), ('nonce', pack.IntType(64)), ('sub_version_num', pack.VarStrType()), ('start_height', pack.IntType(32)), ]) def handle_version(self, version, services, time, addr_to, addr_from, nonce, sub_version_num, start_height): self.send_verack() message_verack = pack.ComposedType([]) def handle_verack(self): self.get_block = deferral.ReplyMatcher(lambda hash: self.send_getdata( requests=[dict(type='block', hash=hash)])) self.get_block_header = deferral.ReplyMatcher( lambda hash: self.send_getheaders(version=1, have=[], last=hash)) if hasattr(self.factory, 'resetDelay'): self.factory.resetDelay() if hasattr(self.factory, 'gotConnection'): self.factory.gotConnection(self) self.pinger = deferral.RobustLoopingCall(self.send_ping, nonce=1234) self.pinger.start(30) message_inv = pack.ComposedType([ ('invs', pack.ListType( pack.ComposedType([ ('type', pack.EnumType( pack.IntType(32), { 1: 'tx', 2: 'block', 3: 'filtered_block', 4: 'txlock_request', 5: 'txlock_vote', 6: 'spork', 7: 'masternode_winner', 8: 'masternode_scanning_error', 9: 'budget_vote', 10: 'budget_proposal', 11: 'budget_finalized', 12: 'budget_finalized_vote', 13: 'masternode_quorum', 14: 'masternode_announce', 15: 'masternode_ping', 16: 'dstx', 17: 'governance_object', 18: 'governance_object_vote', 19: 'masternode_verify' })), ('hash', pack.IntType(256)), ]))), ]) def handle_inv(self, invs): for inv in invs: if inv['type'] == 'block': self.factory.new_block.happened(inv['hash']) elif inv['type'] == 'tx': self.send_getdata(requests=[inv]) else: if p2pool.DEBUG: print 'Unneeded inv type', inv message_getdata = pack.ComposedType([ ('requests', pack.ListType( pack.ComposedType([ ('type', pack.EnumType(pack.IntType(32), { 1: 'tx', 2: 'block' })), ('hash', pack.IntType(256)), ]))), ]) message_getblocks = pack.ComposedType([ ('version', pack.IntType(32)), ('have', pack.ListType(pack.IntType(256))), ('last', pack.PossiblyNoneType(0, pack.IntType(256))), ]) message_getheaders = pack.ComposedType([ ('version', pack.IntType(32)), ('have', pack.ListType(pack.IntType(256))), ('last', pack.PossiblyNoneType(0, pack.IntType(256))), ]) message_getaddr = pack.ComposedType([]) message_addr = pack.ComposedType([ ('addrs', pack.ListType( pack.ComposedType([ ('timestamp', pack.IntType(32)), ('address', axe_data.address_type), ]))), ]) def handle_addr(self, addrs): for addr in addrs: pass message_tx = pack.ComposedType([ ('tx', axe_data.tx_type), ]) def handle_tx(self, tx): self.factory.new_tx.happened(tx) message_block = pack.ComposedType([ ('block', axe_data.block_type), ]) def handle_block(self, block): block_hash = self.net.BLOCKHASH_FUNC( axe_data.block_header_type.pack(block['header'])) self.get_block.got_response(block_hash, block) self.get_block_header.got_response(block_hash, 
block['header']) message_block_old = pack.ComposedType([ ('block', axe_data.block_type_old), ]) def handle_block_old(self, block): block_hash = self.net.BLOCKHASH_FUNC( axe_data.block_header_type.pack(block['header'])) self.get_block.got_response(block_hash, block) self.get_block_header.got_response(block_hash, block['header']) message_headers = pack.ComposedType([ ('headers', pack.ListType(axe_data.block_type_old)), ]) def handle_headers(self, headers): for header in headers: header = header['header'] self.get_block_header.got_response( self.net.BLOCKHASH_FUNC( axe_data.block_header_type.pack(header)), header) self.factory.new_headers.happened( [header['header'] for header in headers]) message_ping = pack.ComposedType([ ('nonce', pack.IntType(64)), ]) def handle_ping(self, nonce): self.send_pong(nonce=nonce) message_pong = pack.ComposedType([ ('nonce', pack.IntType(64)), ]) def handle_pong(self, nonce): pass message_alert = pack.ComposedType([ ('message', pack.VarStrType()), ('signature', pack.VarStrType()), ]) def handle_alert(self, message, signature): pass # print 'ALERT:', (message, signature) def connectionLost(self, reason): if hasattr(self.factory, 'gotConnection'): self.factory.gotConnection(None) if hasattr(self, 'pinger'): self.pinger.stop() if p2pool.DEBUG: print >> sys.stderr, 'Axed connection lost. Reason:', reason.getErrorMessage( )
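# message_inv above relies on EnumType to turn wire-level integer inv types
# into symbolic names before handle_inv dispatches on them.  A plain-Python
# sketch of that mapping (a subset of the table above, purely illustrative):
INV_TYPE_NAMES = {1: 'tx', 2: 'block', 4: 'txlock_request', 6: 'spork'}

def decode_inv(raw_type, raw_hash):
    return dict(type=INV_TYPE_NAMES.get(raw_type, 'unknown-%d' % raw_type), hash=raw_hash)

inv = decode_inv(2, 0xdeadbeef)
assert inv['type'] == 'block'  # handle_inv would fire factory.new_block for this one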
def pubkey_hash_to_script2(pubkey_hash, version, net):
    if version == 0:
        return '\x00\x14' + hex(pubkey_hash)[2:-1].decode("hex")
    if version == net.SEGWIT_ADDRESS_VERSION:
        return '\xa9\x14' + pack.IntType(160).pack(pubkey_hash) + '\x87'
    return '\x76\xa9' + ('\x14' + pack.IntType(160).pack(pubkey_hash)) + '\x88\xac'
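# pubkey_hash_to_script2 above emits one of three scriptPubKey templates:
# P2WPKH for version 0 (0x00 0x14 <hash>), P2SH for the segwit address version
# (0xa9 0x14 <hash> 0x87), and classic P2PKH otherwise
# (0x76 0xa9 0x14 <hash> 0x88 0xac).  A small standalone check of the P2PKH
# layout with an illustrative, made-up 20-byte hash:
import binascii

pubkey_hash_bytes = binascii.unhexlify(b'00' * 20)  # placeholder hash160
p2pkh_script = b'\x76\xa9\x14' + pubkey_hash_bytes + b'\x88\xac'

assert len(p2pkh_script) == 25
assert p2pkh_script[:3] == b'\x76\xa9\x14'  # OP_DUP OP_HASH160 PUSH20
assert p2pkh_script[-2:] == b'\x88\xac'     # OP_EQUALVERIFY OP_CHECKSIG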
def hash160(data):
    return pack.IntType(160).unpack(
        hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest())
def test_address_to_pubkey_hash(self):
    assert data.address_to_pubkey_hash(
        '7gnwGHt17heGpG9Crfeh4KGpYNFugPhJdh', networks.nets['mue']
    ) == pack.IntType(160).unpack('9dc8b19033a16913b6e45437f76d0ab649e9e516'.decode('hex'))
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack


P2P_PREFIX = 'c8e5612c'.decode('hex')
P2P_PORT = 7112
ADDRESS_VERSION = 55
SCRIPT_ADDRESS_VERSION = 10
RPC_PORT = 7111
RPC_CHECK = defer.inlineCallbacks(lambda dashd: defer.returnValue(
    'paccoinaddress' in (yield dashd.rpc_help()) and
    not (yield dashd.rpc_getinfo())['testnet']))
BLOCKHASH_FUNC = lambda data: pack.IntType(256).unpack(__import__('dash_hash').getPoWHash(data))
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('dash_hash').getPoWHash(data))
BLOCK_PERIOD = 150  # s
SYMBOL = 'PAC'
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'PaccoinCore')
    if platform.system() == 'Windows'
    else os.path.expanduser('~/Library/Application Support/PaccoinCore/')
    if platform.system() == 'Darwin'
    else os.path.expanduser('~/.paccoincore'), 'dash.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://explorer.paccoin.net/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://explorer.paccoin.net/address/'
TX_EXPLORER_URL_PREFIX = 'http://explorer.paccoin.net/tx/'
SANE_TARGET_RANGE = (2**256 // 2**32 // 1000000 - 1, 2**256 // 2**32 - 1)
DUST_THRESHOLD = 0.001e8
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack


P2P_PREFIX = 'af4576ee'.decode('hex')
P2P_PORT = 10888
ADDRESS_VERSION = 50
RPC_PORT = 10889
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    (yield helper.check_genesis_block(
        bitcoind,
        '00000ffde4c020b5938441a0ea3d314bf619eff0b38f32f78f7583cffa1ea485')) and
    not (yield bitcoind.rpc_getinfo())['testnet']))
SUBSIDY_FUNC = lambda height: 1000 * 100000000 >> (height + 1) // 967680
POW_FUNC = lambda data: pack.IntType(256).unpack(
    __import__('yescrypt_hash').getHash(data, 80))
BLOCK_PERIOD = 300  # s
SYMBOL = 'XMY'
CONF_FILE_FUNC = lambda: os.path.join(
    os.path.join(os.environ['APPDATA'], 'myriadcoin')
    if platform.system() == 'Windows'
    else os.path.expanduser('~/Library/Application Support/myriadcoin/')
    if platform.system() == 'Darwin'
    else os.path.expanduser('~/.myriadcoin'), 'myriadcoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://insight-myr.cryptap.us/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://insight-myr.cryptap.us/address/'
TX_EXPLORER_URL_PREFIX = 'http://insight-myr.cryptap.us/tx/'
SANE_TARGET_RANGE = (2**256 // 1000000000 - 1, 2**256 // 10000 - 1)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 0.001e8
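# SANE_TARGET_RANGE above bounds the share target, and DUMB_SCRYPT_DIFF rescales
# the difficulty some miners display.  A hedged sketch of the usual
# target -> difficulty conversion, assuming the conventional SHA-256d
# difficulty-1 target of 0xffff * 2**208 (not necessarily the exact constant
# p2pool's helpers use):
DIFF1_TARGET = 0xffff * 2 ** 208

def target_to_difficulty(target):
    return float(DIFF1_TARGET) / target

# the easy end of the range above corresponds to a difficulty well below 1:
easy_target = 2**256 // 10000 - 1
assert target_to_difficulty(easy_target) < 1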
def hash256(data):
    return pack.IntType(256).unpack(
        hashlib.sha256(hashlib.sha256(data).digest()).digest())
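# hash256 above is the double SHA-256 used for txids and block hashes, again
# unpacked little-endian into an integer.  An equivalent stdlib-only sketch,
# plus the formatting explorers use when printing a hash:
import binascii
import hashlib

def example_hash256_int(data):
    digest = hashlib.sha256(hashlib.sha256(data).digest()).digest()
    return int(binascii.hexlify(digest[::-1]), 16)  # little-endian bytes -> integer

def example_display_hex(h):
    return '%064x' % h  # the familiar 64-character big-endian hex string

assert len(example_display_hex(example_hash256_int(b'hello'))) == 64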
def pubkey_hash_to_script2(pubkey_hash):
    return '\x76\xa9' + ('\x14' + pack.IntType(160).pack(pubkey_hash)) + '\x88\xac'
def parse_bip0034(coinbase):
    _, opdata = script.parse(coinbase).next()
    bignum = pack.IntType(len(opdata) * 8).unpack(opdata)
    if ord(opdata[-1]) & 0x80:
        bignum = -bignum
    return (bignum,)
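# parse_bip0034 above reads the BIP 34 block height that the coinbase script
# starts with: a minimal little-endian push.  A standalone worked example for
# height 500000 (0x07a120); this sketch ignores parse_bip0034's negative branch
# since real heights are non-negative:
import binascii

coinbase_prefix = b'\x03' + binascii.unhexlify(b'20a107')  # push 3 bytes: 20 a1 07

def decode_bip34_height(coinbase):
    n = ord(coinbase[0:1])  # size of the leading small push
    return int(binascii.hexlify(coinbase[1:1 + n][::-1]), 16)  # little-endian value

assert decode_bip34_height(coinbase_prefix) == 500000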
def check_hash_link(hash_link, data, const_ending=''):
    extra_length = hash_link['length'] % (512//8)
    assert len(hash_link['extra_data']) == max(0, extra_length - len(const_ending))
    extra = (hash_link['extra_data'] + const_ending)[len(hash_link['extra_data']) + len(const_ending) - extra_length:]
    assert len(extra) == extra_length
    return pack.IntType(256).unpack(hashlib.sha256(sha256.sha256(data, (hash_link['state'], extra, 8*hash_link['length'])).digest()).digest())
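# check_hash_link above finishes a SHA-256 computation from a saved midstate
# ('state', 'extra_data', 'length'), so a share only has to carry the tail of
# the generation transaction instead of the whole thing.  hashlib cannot export
# midstates, but the underlying idea is ordinary incremental hashing: the state
# after update(prefix) plus the remaining bytes fully determines the digest.
# Illustrative sketch with made-up placeholder data:
import hashlib

prefix = b'generation-tx-head...'       # the large part a verifier can reconstruct
suffix = b'ref-hash||nonce||locktime'   # the small part actually shipped with the share

h = hashlib.sha256()
h.update(prefix)   # this point corresponds to the stored midstate
h.update(suffix)

assert h.digest() == hashlib.sha256(prefix + suffix).digest()
# check_hash_link then applies the outer sha256 of the double hash on top.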
class Share(object): VERSION = 15 VOTING_VERSION = 15 SUCCESSOR = None small_block_header_type = pack.ComposedType([ ('version', pack.VarIntType()), ('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))), ('timestamp', pack.IntType(32)), ('bits', unio_data.FloatingIntegerType()), ('nonce', pack.IntType(32)), ]) share_info_type = pack.ComposedType([ ('share_data', pack.ComposedType([ ('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))), ('coinbase', pack.VarStrType()), ('nonce', pack.IntType(32)), ('pubkey_hash', pack.IntType(160)), ('subsidy', pack.IntType(64)), ('donation', pack.IntType(16)), ('stale_info', pack.EnumType(pack.IntType(8), dict((k, {0: None, 253: 'orphan', 254: 'doa'}.get(k, 'unk%i' % (k,))) for k in xrange(256)))), ('desired_version', pack.VarIntType()), ('payment_amount', pack.IntType(64)), ('packed_payments', pack.ListType(pack.ComposedType([ ('payee', pack.PossiblyNoneType('',pack.VarStrType())), ('amount', pack.PossiblyNoneType(0,pack.IntType(64))), ]))), ])), ('new_transaction_hashes', pack.ListType(pack.IntType(256))), ('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count ('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))), ('max_bits', unio_data.FloatingIntegerType()), ('bits', unio_data.FloatingIntegerType()), ('timestamp', pack.IntType(32)), ('absheight', pack.IntType(32)), ('abswork', pack.IntType(128)), ]) share_type = pack.ComposedType([ ('min_header', small_block_header_type), ('share_info', share_info_type), ('ref_merkle_link', pack.ComposedType([ ('branch', pack.ListType(pack.IntType(256))), ('index', pack.IntType(0)), ])), ('last_txout_nonce', pack.IntType(64)), ('hash_link', hash_link_type), ('merkle_link', pack.ComposedType([ ('branch', pack.ListType(pack.IntType(256))), ('index', pack.IntType(0)), # it will always be 0 ])), ]) ref_type = pack.ComposedType([ ('identifier', pack.FixedStrType(64//8)), ('share_info', share_info_type), ]) gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x6a\x28' + pack.IntType(256).pack(0) + pack.IntType(64).pack(0))[:3] @classmethod def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None): previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None height, last = tracker.get_height_and_last(share_data['previous_share_hash']) assert height >= net.REAL_CHAIN_LENGTH or last is None if height < net.TARGET_LOOKBEHIND: pre_target3 = net.MAX_TARGET else: attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True) pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1 pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10)) pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET)) max_bits = unio_data.FloatingInteger.from_target_upper_bound(pre_target3) bits = unio_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//30, pre_target3))) new_transaction_hashes = [] new_transaction_size = 0 transaction_hash_refs = [] other_transaction_hashes = [] past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100))) 
tx_hash_to_this = {} for i, share in enumerate(past_shares): for j, tx_hash in enumerate(share.new_transaction_hashes): if tx_hash not in tx_hash_to_this: tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count for tx_hash, fee in desired_other_transaction_hashes_and_fees: if tx_hash in tx_hash_to_this: this = tx_hash_to_this[tx_hash] else: if known_txs is not None: this_size = unio_data.tx_type.packed_size(known_txs[tx_hash]) #if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share # break new_transaction_size += this_size new_transaction_hashes.append(tx_hash) this = [0, len(new_transaction_hashes)-1] transaction_hash_refs.extend(this) other_transaction_hashes.append(tx_hash) included_transactions = set(other_transaction_hashes) removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions] definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions) if None not in removed_fees: share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees)) else: assert base_subsidy is not None share_data = dict(share_data, subsidy=base_subsidy + definite_fees) weights, total_weight, donation_weight = tracker.get_cumulative_weights(previous_share.share_data['previous_share_hash'] if previous_share is not None else None, max(0, min(height, net.REAL_CHAIN_LENGTH) - 1), 65535*net.SPREAD*unio_data.target_to_average_attempts(block_target), ) assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight) worker_payout = share_data['subsidy'] payments = share_data['packed_payments'] payments_tx = [] if payments is not None: for obj in payments: pm_script = unio_data.address_to_script2(obj['payee'],net.PARENT) pm_payout = obj['amount'] if pm_payout > 0: payments_tx += [dict(value=pm_payout, script=pm_script)] worker_payout -= pm_payout amounts = dict((script, worker_payout*(49*weight)//(50*total_weight)) for script, weight in weights.iteritems()) # 98% goes according to weights prior to this share this_script = unio_data.pubkey_hash_to_script2(share_data['pubkey_hash']) amounts[this_script] = amounts.get(this_script, 0) + worker_payout//50 # 2% goes to block finder amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + worker_payout - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding if sum(amounts.itervalues()) != worker_payout or any(x < 0 for x in amounts.itervalues()): raise ValueError() worker_scripts = sorted([k for k in amounts.iterkeys() if k != DONATION_SCRIPT]) worker_tx=[dict(value=amounts[script], script=script) for script in worker_scripts if amounts[script]] donation_tx = [dict(value=amounts[DONATION_SCRIPT], script=DONATION_SCRIPT)] share_info = dict( share_data=share_data, far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99), max_bits=max_bits, bits=bits, timestamp=math.clip(desired_timestamp, ( (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1 (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1), )) if previous_share is not None else desired_timestamp, new_transaction_hashes=new_transaction_hashes, transaction_hash_refs=transaction_hash_refs, absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32, 
            abswork=((previous_share.abswork if previous_share is not None else 0) + unio_data.target_to_average_attempts(bits.target)) % 2**128,
        )

        gentx = dict(
            version=1,
            tx_ins=[dict(
                previous_output=None,
                sequence=None,
                script=share_data['coinbase'],
            )],
            tx_outs=worker_tx + payments_tx + donation_tx + [dict(
                value=0,
                script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce),
            )],
            lock_time=0,
        )

        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header)
            del min_header['merkle_root']
            share = cls(net, None, dict(
                min_header=min_header,
                share_info=share_info,
                ref_merkle_link=dict(branch=[], index=0),
                last_txout_nonce=last_txout_nonce,
                hash_link=prefix_to_hash_link(unio_data.tx_type.pack(gentx)[:-32-8-4], cls.gentx_before_refhash),
                merkle_link=unio_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
            ))
            assert share.header == header # checks merkle_root
            return share

        return share_info, gentx, other_transaction_hashes, get_share

    @classmethod
    def get_ref_hash(cls, net, share_info, ref_merkle_link):
        return pack.IntType(256).pack(unio_data.check_merkle_link(unio_data.hash256(cls.ref_type.pack(dict(
            identifier=net.IDENTIFIER,
            share_info=share_info,
        ))), ref_merkle_link))

    __slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen absheight abswork'.split(' ')

    def __init__(self, net, peer_addr, contents):
        self.net = net
        self.peer_addr = peer_addr
        self.contents = contents

        self.min_header = contents['min_header']
        self.share_info = contents['share_info']
        self.hash_link = contents['hash_link']
        self.merkle_link = contents['merkle_link']

        if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
            raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))

        if len(self.merkle_link['branch']) > 16:
            raise ValueError('merkle branch too long!')

        assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])

        self.share_data = self.share_info['share_data']
        self.max_target = self.share_info['max_bits'].target
        self.target = self.share_info['bits'].target
        self.timestamp = self.share_info['timestamp']
        self.previous_hash = self.share_data['previous_share_hash']
        self.new_script = unio_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
        self.desired_version = self.share_data['desired_version']
        self.absheight = self.share_info['absheight']
        self.abswork = self.share_info['abswork']

        n = set()
        for share_count, tx_count in self.iter_transaction_hash_refs():
            assert share_count < 110
            if share_count == 0:
                n.add(tx_count)
        assert n == set(xrange(len(self.share_info['new_transaction_hashes'])))

        self.gentx_hash = check_hash_link(
            self.hash_link,
            self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0),
            self.gentx_before_refhash,
        )
        merkle_root = unio_data.check_merkle_link(self.gentx_hash, self.merkle_link)
        self.header = dict(self.min_header, merkle_root=merkle_root)
        self.pow_hash = net.PARENT.POW_FUNC(unio_data.block_header_type.pack(self.header))
        self.hash = self.header_hash = net.PARENT.BLOCKHASH_FUNC(unio_data.block_header_type.pack(self.header))

        if self.target > net.MAX_TARGET:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share target invalid')

        if self.pow_hash > self.target:
            from p2pool import p2p
            raise p2p.PeerMisbehavingError('share PoW invalid')

        self.new_transaction_hashes = self.share_info['new_transaction_hashes']

        # XXX eww
        self.time_seen = time.time()

    def __repr__(self):
        return 'Share' + repr((self.net, self.peer_addr, self.contents))

    def as_share(self):
        return dict(type=self.VERSION, contents=self.share_type.pack(self.contents))

    def iter_transaction_hash_refs(self):
        return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])

    def check(self, tracker):
        from p2pool import p2p
        if self.share_data['previous_share_hash'] is not None:
            previous_share = tracker.items[self.share_data['previous_share_hash']]
            if type(self) is type(previous_share):
                pass
            elif type(self) is type(previous_share).SUCCESSOR:
                if tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
                    from p2pool import p2p
                    raise p2p.PeerMisbehavingError('switch without enough history')

                # switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
                counts = get_desired_version_counts(tracker,
                    tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
                if counts.get(self.VERSION, 0) < sum(counts.itervalues())*85//100:
                    raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
            else:
                raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))

        other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]

        share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes],
            self.net, last_txout_nonce=self.contents['last_txout_nonce'])
        assert other_tx_hashes2 == other_tx_hashes
        if share_info != self.share_info:
            raise ValueError('share_info invalid')
        if unio_data.hash256(unio_data.tx_type.pack(gentx)) != self.gentx_hash:
            raise ValueError('''gentx doesn't match hash_link''')
        if unio_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link:
            raise ValueError('merkle_link and other_tx_hashes do not match')

        return gentx # only used by as_block

    def get_other_tx_hashes(self, tracker):
        parents_needed = max(share_count for share_count, tx_count in self.iter_transaction_hash_refs()) if self.share_info['transaction_hash_refs'] else 0
        parents = tracker.get_height(self.hash) - 1
        if parents < parents_needed:
            return None
        last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
        return [last_shares[share_count].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]

    def _get_other_txs(self, tracker, known_txs):
        other_tx_hashes = self.get_other_tx_hashes(tracker)
        if other_tx_hashes is None:
            return None # not all parents present

        if not all(tx_hash in known_txs for tx_hash in other_tx_hashes):
            return None # not all txs present

        return [known_txs[tx_hash] for tx_hash in other_tx_hashes]

    def should_punish_reason(self, previous_block, bits, tracker, known_txs):
        if (self.header['previous_block'], self.header['bits']) != (previous_block, bits) and self.header_hash != previous_block and self.peer_addr is not None:
            return True, 'Block-stale detected! height(%x) < height(%x) or %08x != %08x' % (self.header['previous_block'], previous_block, self.header['bits'].bits, bits.bits)

        if self.pow_hash <= self.header['bits'].target:
            return -1, 'block solution'

        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            pass
        else:
            all_txs_size = sum(unio_data.tx_type.packed_size(tx) for tx in other_txs)
            if all_txs_size > 2000000:
                return True, 'txs over block size limit'
            '''
            new_txs_size = sum(unio_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
            if new_txs_size > 50000:
                return True, 'new txs over limit'
            '''

        return False, None

    def as_block(self, tracker, known_txs):
        other_txs = self._get_other_txs(tracker, known_txs)
        if other_txs is None:
            return None # not all txs present
        return dict(header=self.header, txs=[self.check(tracker)] + other_txs)
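# p2pool parent-network definition: Mincoin test-network parameters
# (RPC_CHECK looks for 'mincoinaddress'; SYMBOL 'rMNC', ADDRESS_VERSION 111).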
import os
import platform

from twisted.internet import defer

from .. import data, helper
from p2pool.util import pack


P2P_PREFIX = '80f3f8e2'.decode('hex')
P2P_PORT = 19445
ADDRESS_VERSION = 111
RPC_PORT = 19446
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    'mincoinaddress' in (yield bitcoind.rpc_help())
))
SUBSIDY_FUNC = lambda height: 2*100000000
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('ltc_scrypt').getPoWHash(data))
BLOCK_PERIOD = 60 # s
SYMBOL = 'rMNC'
CONF_FILE_FUNC = lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'Mincoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/Mincoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.mincoin'), 'mincoin.conf')
BLOCK_EXPLORER_URL_PREFIX = '#'
ADDRESS_EXPLORER_URL_PREFIX = '#'
TX_EXPLORER_URL_PREFIX = '#'
SANE_TARGET_RANGE = (2**256//2**32//1000 - 1, 2**256//2 - 1)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 0.00075e8
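# SophiaCoin network definitions: mainnet ('SPC') and testnet entries collected
# into a nets dict of math.Object values.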
    else:
        defer.returnValue(True)

nets = dict(
    sophiacoin=math.Object(
        P2P_PREFIX='c3f1a405'.decode('hex'),
        P2P_PORT=9696,
        ADDRESS_VERSION=63,
        RPC_PORT=9695,
        RPC_CHECK=defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
            'SophiaCoinaddress' in (yield bitcoind.rpc_help()) and
            not (yield bitcoind.rpc_getinfo())['testnet']
        )),
        SUBSIDY_FUNC=lambda nBits, height: __import__('sophiacoin_subsidy').getBlockBaseValue(nBits, height+1),
        POW_FUNC=lambda data: pack.IntType(256).unpack(__import__('sophiacoin_hash').getHash(data, 80)),
        BLOCK_PERIOD=60, # s
        SYMBOL='SPC',
        CONF_FILE_FUNC=lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'SophiaCoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/SophiaCoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.SophiaCoin'), 'SophiaCoin.conf'),
        BLOCK_EXPLORER_URL_PREFIX='',
        ADDRESS_EXPLORER_URL_PREFIX='',
        TX_EXPLORER_URL_PREFIX='',
        SANE_TARGET_RANGE=(2**256//2**32//1000 - 1, 2**256//2**20 - 1),
        DUMB_SCRYPT_DIFF=256,
        DUST_THRESHOLD=0.001e8,
    ),
    sophiacoin_testnet=math.Object(
        P2P_PREFIX='fba206d3'.decode('hex'),
        P2P_PORT=19695,
        ADDRESS_VERSION=111,